Compare commits
5 Commits
| Author | SHA1 | Date |
|---|---|---|
| | bee7eb0fcb | |
| | 0acb030f46 | |
| | 658a1c8228 | |
| | 618316a2f7 | |
| | a9c2117aa7 | |

@@ -6,7 +6,6 @@ description: Learn about RBAC Grafana provisioning and view an example YAML prov
file that configures Grafana role assignments.
labels:
  products:
    - cloud
    - enterprise
menuTitle: Provisioning RBAC with Grafana
title: Provisioning RBAC with Grafana

@@ -347,6 +347,7 @@
    "date-fns": "4.1.0",
    "debounce-promise": "3.1.2",
    "diff": "^8.0.0",
    "downsample": "1.4.0",
    "fast-deep-equal": "^3.1.3",
    "fast-json-patch": "3.1.1",
    "file-saver": "2.0.5",

@@ -42,5 +42,6 @@ export enum DataTransformerID {
  formatTime = 'formatTime',
  formatString = 'formatString',
  regression = 'regression',
  smoothing = 'smoothing',
  groupToNestedTable = 'groupToNestedTable',
}

@@ -1255,4 +1255,8 @@ export interface FeatureToggles {
   * Enables support for variables whose values can have multiple properties
   */
  multiPropsVariables?: boolean;
  /**
   * Enables the ASAP smoothing transformation for time series data
   */
  smoothingTransformation?: boolean;
}

@@ -1,7 +1,6 @@
package middleware

import (
    "context"
    "errors"
    "net/http"
    "net/url"
@@ -22,13 +21,6 @@ import (
    "github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore"
    "github.com/grafana/grafana/pkg/setting"
    "github.com/grafana/grafana/pkg/web"
    "github.com/open-feature/go-sdk/openfeature"
)

var openfeatureClient = openfeature.NewDefaultClient()

const (
    pluginPageFeatureFlagPrefix = "plugin-page-visible."
)

type AuthOptions struct {
@@ -154,12 +146,6 @@ func RoleAppPluginAuth(accessControl ac.AccessControl, ps pluginstore.Store, log
        return
    }

    if !PageIsFeatureToggleEnabled(c.Req.Context(), c.Req.URL.Path) {
        logger.Debug("Forbidden experimental plugin page", "plugin", pluginID, "path", c.Req.URL.Path)
        accessForbidden(c)
        return
    }

    permitted := true
    path := normalizeIncludePath(c.Req.URL.Path)
    hasAccess := ac.HasAccess(accessControl, c)
@@ -308,18 +294,3 @@ func shouldForceLogin(c *contextmodel.ReqContext) bool {

    return forceLogin
}

// PageIsFeatureToggleEnabled checks if a page is enabled via OpenFeature feature flags.
// It returns false only if the feature flag is set and evaluates to false.
// The feature flag key format is: "plugin-page-visible.<path>"
func PageIsFeatureToggleEnabled(ctx context.Context, path string) bool {
    flagKey := pluginPageFeatureFlagPrefix + filepath.Clean(path)
    enabled := openfeatureClient.Boolean(
        ctx,
        flagKey,
        true,
        openfeature.TransactionContext(ctx),
    )

    return enabled
}
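
For context, a minimal sketch of how a deployment could hide a single plugin page through this gate, using only the `openfeature` and `memprovider` APIs that appear elsewhere in this diff; the path, variant name, and `memprovider.Enabled` state value are illustrative assumptions:

```go
// Hypothetical wiring: register an in-memory OpenFeature provider whose flag
// evaluates to false for one plugin page. While this provider is active,
// PageIsFeatureToggleEnabled(ctx, "/a/my-plugin/settings") returns false and
// RoleAppPluginAuth rejects requests to that path.
flags := map[string]memprovider.InMemoryFlag{
    "plugin-page-visible./a/my-plugin/settings": {
        State:          memprovider.Enabled,
        DefaultVariant: "off",
        Variants:       map[string]any{"off": false},
    },
}
if err := openfeature.SetProviderAndWait(memprovider.NewInMemoryProvider(flags)); err != nil {
    // Handle provider registration failure.
}
```
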
@@ -1,17 +1,12 @@
package middleware

import (
    "context"
    "errors"
    "fmt"
    "net/http"
    "net/http/httptest"
    "sync"
    "testing"

    "github.com/open-feature/go-sdk/openfeature"
    "github.com/open-feature/go-sdk/openfeature/memprovider"
    oftesting "github.com/open-feature/go-sdk/openfeature/testing"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"

@@ -33,8 +28,6 @@ import (
    "github.com/grafana/grafana/pkg/web"
)

var openfeatureTestMutex sync.Mutex

func setupAuthMiddlewareTest(t *testing.T, identity *authn.Identity, authErr error) *contexthandler.ContextHandler {
    return contexthandler.ProvideService(setting.NewCfg(), &authntest.FakeService{
        ExpectedErr: authErr,
@@ -429,60 +422,6 @@ func TestCanAdminPlugin(t *testing.T) {
    }
}

func TestPageIsFeatureToggleEnabled(t *testing.T) {
    type testCase struct {
        desc           string
        path           string
        flags          map[string]bool
        expectedResult bool
    }

    tests := []testCase{
        {
            desc: "returns true when feature flag is enabled",
            path: "/a/my-plugin/settings",
            flags: map[string]bool{
                pluginPageFeatureFlagPrefix + "/a/my-plugin/settings": true,
            },
            expectedResult: true,
        },
        {
            desc: "returns false when feature flag is disabled",
            path: "/a/my-plugin/settings",
            flags: map[string]bool{
                pluginPageFeatureFlagPrefix + "/a/my-plugin/settings": false,
            },
            expectedResult: false,
        },
        {
            desc: "returns false when feature flag is disabled with trailing slash",
            path: "/a/my-plugin/settings/",
            flags: map[string]bool{
                pluginPageFeatureFlagPrefix + "/a/my-plugin/settings": false,
            },
            expectedResult: false,
        },
        {
            desc:           "returns true when feature flag does not exist",
            path:           "/a/my-plugin/settings",
            flags:          map[string]bool{},
            expectedResult: true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.desc, func(t *testing.T) {
            ctx := context.Background()

            setupTestProvider(t, tt.flags)

            result := PageIsFeatureToggleEnabled(ctx, tt.path)

            assert.Equal(t, tt.expectedResult, result)
        })
    }
}

func contextProvider(modifiers ...func(c *contextmodel.ReqContext)) web.Handler {
    return func(c *web.Context) {
        reqCtx := &contextmodel.ReqContext{
@@ -498,38 +437,3 @@ func contextProvider(modifiers ...func(c *contextmodel.ReqContext)) web.Handler
        c.Req = c.Req.WithContext(ctxkey.Set(c.Req.Context(), reqCtx))
    }
}

// setupTestProvider creates a test OpenFeature provider with the given flags.
// Uses a global lock to prevent concurrent provider changes across tests.
func setupTestProvider(t *testing.T, flags map[string]bool) oftesting.TestProvider {
    t.Helper()

    // Lock to prevent concurrent provider changes
    openfeatureTestMutex.Lock()

    testProvider := oftesting.NewTestProvider()
    flagsMap := map[string]memprovider.InMemoryFlag{}

    for key, value := range flags {
        flagsMap[key] = memprovider.InMemoryFlag{
            DefaultVariant: "defaultVariant",
            Variants: map[string]any{
                "defaultVariant": value,
            },
        }
    }

    testProvider.UsingFlags(t, flagsMap)

    err := openfeature.SetProviderAndWait(testProvider)
    require.NoError(t, err)

    t.Cleanup(func() {
        testProvider.Cleanup()
        _ = openfeature.SetProviderAndWait(openfeature.NoopProvider{})
        // Unlock after cleanup to allow other tests to run
        openfeatureTestMutex.Unlock()
    })

    return testProvider
}

@@ -1,44 +0,0 @@
package acimpl

import (
    "context"
    "time"

    "github.com/grafana/grafana/pkg/services/accesscontrol"
)

const (
    ossBasicRoleSeedLockName = "oss-ac-basic-role-seeder"
    ossBasicRoleSeedTimeout  = 2 * time.Minute
)

// refreshBasicRolePermissionsInDB ensures basic role permissions are fully derived from in-memory registrations
func (s *Service) refreshBasicRolePermissionsInDB(ctx context.Context, rolesSnapshot map[string][]accesscontrol.Permission) error {
    if s.sql == nil || s.seeder == nil {
        return nil
    }

    run := func(ctx context.Context) error {
        desired := map[accesscontrol.SeedPermission]struct{}{}
        for role, permissions := range rolesSnapshot {
            for _, permission := range permissions {
                desired[accesscontrol.SeedPermission{BuiltInRole: role, Action: permission.Action, Scope: permission.Scope}] = struct{}{}
            }
        }
        s.seeder.SetDesiredPermissions(desired)
        return s.seeder.Seed(ctx)
    }

    if s.serverLock == nil {
        return run(ctx)
    }

    var err error
    errLock := s.serverLock.LockExecuteAndRelease(ctx, ossBasicRoleSeedLockName, ossBasicRoleSeedTimeout, func(ctx context.Context) {
        err = run(ctx)
    })
    if errLock != nil {
        return errLock
    }
    return err
}
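
To make the `rolesSnapshot` shape concrete, here is a minimal sketch with hypothetical values; each entry becomes one `SeedPermission` key in the desired set handed to the seeder:

```go
// Keys are builtin role names; values are the permissions those roles should hold.
rolesSnapshot := map[string][]accesscontrol.Permission{
    "Viewer": {{Action: "dashboards:read", Scope: "dashboards:*"}},
    "Admin":  {{Action: "dashboards:write", Scope: "dashboards:*"}},
}
// Under the server lock (when configured), this diffs the desired set against
// the permission table and applies only the changes.
err := s.refreshBasicRolePermissionsInDB(ctx, rolesSnapshot)
```
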
@@ -1,128 +0,0 @@
package acimpl

import (
    "context"
    "testing"
    "time"

    "github.com/stretchr/testify/require"

    "github.com/grafana/grafana/pkg/infra/db"
    "github.com/grafana/grafana/pkg/infra/localcache"
    "github.com/grafana/grafana/pkg/infra/tracing"
    "github.com/grafana/grafana/pkg/services/accesscontrol"
    "github.com/grafana/grafana/pkg/services/accesscontrol/database"
    "github.com/grafana/grafana/pkg/services/accesscontrol/permreg"
    "github.com/grafana/grafana/pkg/services/accesscontrol/resourcepermissions"
    "github.com/grafana/grafana/pkg/services/featuremgmt"
    "github.com/grafana/grafana/pkg/services/org"
    "github.com/grafana/grafana/pkg/setting"
    "github.com/grafana/grafana/pkg/util/testutil"
)

func TestIntegration_OSSBasicRolePermissions_PersistAndRefreshOnRegisterFixedRoles(t *testing.T) {
    testutil.SkipIntegrationTestInShortMode(t)

    ctx := context.Background()
    sql := db.InitTestDB(t)
    store := database.ProvideService(sql)

    svc := ProvideOSSService(
        setting.NewCfg(),
        store,
        &resourcepermissions.FakeActionSetSvc{},
        localcache.ProvideService(),
        featuremgmt.WithFeatures(),
        tracing.InitializeTracerForTest(),
        sql,
        permreg.ProvidePermissionRegistry(),
        nil,
    )

    require.NoError(t, svc.DeclareFixedRoles(accesscontrol.RoleRegistration{
        Role: accesscontrol.RoleDTO{
            Name: "fixed:test:role",
            Permissions: []accesscontrol.Permission{
                {Action: "test:read", Scope: ""},
            },
        },
        Grants: []string{string(org.RoleViewer)},
    }))

    require.NoError(t, svc.RegisterFixedRoles(ctx))

    // verify permission is persisted to DB for basic:viewer
    require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
        var role accesscontrol.Role
        ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
        require.NoError(t, err)
        require.True(t, ok)

        var count int64
        count, err = sess.Table("permission").Where("role_id = ? AND action = ? AND scope = ?", role.ID, "test:read", "").Count()
        require.NoError(t, err)
        require.Equal(t, int64(1), count)
        return nil
    }))

    // ensure RegisterFixedRoles refreshes it back to defaults
    require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
        ts := time.Now()
        var role accesscontrol.Role
        ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
        require.NoError(t, err)
        require.True(t, ok)

        _, err = sess.Exec("DELETE FROM permission WHERE role_id = ?", role.ID)
        require.NoError(t, err)
        p := accesscontrol.Permission{
            RoleID:  role.ID,
            Action:  "custom:keep",
            Scope:   "",
            Created: ts,
            Updated: ts,
        }
        p.Kind, p.Attribute, p.Identifier = accesscontrol.SplitScope(p.Scope)
        _, err = sess.Table("permission").Insert(&p)
        return err
    }))

    svc2 := ProvideOSSService(
        setting.NewCfg(),
        store,
        &resourcepermissions.FakeActionSetSvc{},
        localcache.ProvideService(),
        featuremgmt.WithFeatures(),
        tracing.InitializeTracerForTest(),
        sql,
        permreg.ProvidePermissionRegistry(),
        nil,
    )
    require.NoError(t, svc2.DeclareFixedRoles(accesscontrol.RoleRegistration{
        Role: accesscontrol.RoleDTO{
            Name: "fixed:test:role",
            Permissions: []accesscontrol.Permission{
                {Action: "test:read", Scope: ""},
            },
        },
        Grants: []string{string(org.RoleViewer)},
    }))
    require.NoError(t, svc2.RegisterFixedRoles(ctx))

    require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
        var role accesscontrol.Role
        ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
        require.NoError(t, err)
        require.True(t, ok)

        var count int64
        count, err = sess.Table("permission").Where("role_id = ? AND action = ? AND scope = ?", role.ID, "test:read", "").Count()
        require.NoError(t, err)
        require.Equal(t, int64(1), count)

        count, err = sess.Table("permission").Where("role_id = ? AND action = ?", role.ID, "custom:keep").Count()
        require.NoError(t, err)
        require.Equal(t, int64(0), count)
        return nil
    }))
}

@@ -30,7 +30,6 @@ import (
    "github.com/grafana/grafana/pkg/services/accesscontrol/migrator"
    "github.com/grafana/grafana/pkg/services/accesscontrol/permreg"
    "github.com/grafana/grafana/pkg/services/accesscontrol/pluginutils"
    "github.com/grafana/grafana/pkg/services/accesscontrol/seeding"
    "github.com/grafana/grafana/pkg/services/dashboards"
    "github.com/grafana/grafana/pkg/services/featuremgmt"
    "github.com/grafana/grafana/pkg/services/folder"
@@ -97,12 +96,6 @@ func ProvideOSSService(
        roles:        accesscontrol.BuildBasicRoleDefinitions(),
        store:        store,
        permRegistry: permRegistry,
        sql:          db,
        serverLock:   lock,
    }

    if backend, ok := store.(*database.AccessControlStore); ok {
        s.seeder = seeding.New(log.New("accesscontrol.seeder"), backend, backend)
    }

    return s
@@ -119,11 +112,8 @@ type Service struct {
    rolesMu       sync.RWMutex
    roles         map[string]*accesscontrol.RoleDTO
    store         accesscontrol.Store
    seeder        *seeding.Seeder
    permRegistry  permreg.PermissionRegistry
    isInitialized bool
    sql           db.DB
    serverLock    *serverlock.ServerLockService
}

func (s *Service) GetUsageStats(_ context.Context) map[string]any {
@@ -441,54 +431,17 @@ func (s *Service) RegisterFixedRoles(ctx context.Context) error {
    defer span.End()

    s.rolesMu.Lock()
    registrations := s.registrations.Slice()
    defer s.rolesMu.Unlock()

    s.registrations.Range(func(registration accesscontrol.RoleRegistration) bool {
        s.registerRolesLocked(registration)
        return true
    })

    s.isInitialized = true

    rolesSnapshot := s.getBasicRolePermissionsLocked()
    s.rolesMu.Unlock()

    if s.seeder != nil {
        if err := s.seeder.SeedRoles(ctx, registrations); err != nil {
            return err
        }
        if err := s.seeder.RemoveAbsentRoles(ctx); err != nil {
            return err
        }
    }

    if err := s.refreshBasicRolePermissionsInDB(ctx, rolesSnapshot); err != nil {
        return err
    }

    return nil
}

// getBasicRolePermissionsLocked computes the desired basic role permissions from the
// current registration list, using the shared seeding registration logic.
//
// It must be called while holding the roles lock.
func (s *Service) getBasicRolePermissionsLocked() map[string][]accesscontrol.Permission {
    desired := map[accesscontrol.SeedPermission]struct{}{}
    s.registrations.Range(func(registration accesscontrol.RoleRegistration) bool {
        seeding.AppendDesiredPermissions(desired, s.log, &registration.Role, registration.Grants, registration.Exclude, true)
        return true
    })

    out := make(map[string][]accesscontrol.Permission)
    for sp := range desired {
        out[sp.BuiltInRole] = append(out[sp.BuiltInRole], accesscontrol.Permission{
            Action: sp.Action,
            Scope:  sp.Scope,
        })
    }
    return out
}

// registerRolesLocked processes a single role registration and adds permissions to basic roles.
// Must be called with s.rolesMu locked.
func (s *Service) registerRolesLocked(registration accesscontrol.RoleRegistration) {
@@ -521,7 +474,6 @@ func (s *Service) DeclarePluginRoles(ctx context.Context, ID, name string, regs
    defer span.End()

    acRegs := pluginutils.ToRegistrations(ID, name, regs)
    updatedBasicRoles := false
    for _, r := range acRegs {
        if err := pluginutils.ValidatePluginRole(ID, r.Role); err != nil {
            return err
@@ -548,23 +500,11 @@ func (s *Service) DeclarePluginRoles(ctx context.Context, ID, name string, regs
        if initialized {
            s.rolesMu.Lock()
            s.registerRolesLocked(r)
            updatedBasicRoles = true
            s.rolesMu.Unlock()
            s.cache.Flush()
        }
    }

    if updatedBasicRoles {
        s.rolesMu.RLock()
        rolesSnapshot := s.getBasicRolePermissionsLocked()
        s.rolesMu.RUnlock()

        // Plugin roles can be declared after startup - keep the DB in sync.
        if err := s.refreshBasicRolePermissionsInDB(ctx, rolesSnapshot); err != nil {
            return err
        }
    }

    return nil
}

@@ -1,623 +0,0 @@
package database

import (
    "context"
    "strings"
    "time"

    "github.com/grafana/grafana/pkg/infra/db"
    "github.com/grafana/grafana/pkg/services/accesscontrol"
    "github.com/grafana/grafana/pkg/services/accesscontrol/seeding"
    "github.com/grafana/grafana/pkg/services/sqlstore/migrator"
    "github.com/grafana/grafana/pkg/util/xorm/core"
)

const basicRolePermBatchSize = 500

// LoadRoles returns all fixed and plugin roles (global org) with permissions, indexed by role name.
func (s *AccessControlStore) LoadRoles(ctx context.Context) (map[string]*accesscontrol.RoleDTO, error) {
    out := map[string]*accesscontrol.RoleDTO{}

    err := s.sql.WithDbSession(ctx, func(sess *db.Session) error {
        type roleRow struct {
            ID          int64     `xorm:"id"`
            OrgID       int64     `xorm:"org_id"`
            Version     int64     `xorm:"version"`
            UID         string    `xorm:"uid"`
            Name        string    `xorm:"name"`
            DisplayName string    `xorm:"display_name"`
            Description string    `xorm:"description"`
            Group       string    `xorm:"group_name"`
            Hidden      bool      `xorm:"hidden"`
            Updated     time.Time `xorm:"updated"`
            Created     time.Time `xorm:"created"`
        }

        roles := []roleRow{}
        if err := sess.Table("role").
            Where("org_id = ?", accesscontrol.GlobalOrgID).
            Where("(name LIKE ? OR name LIKE ?)", accesscontrol.FixedRolePrefix+"%", accesscontrol.PluginRolePrefix+"%").
            Find(&roles); err != nil {
            return err
        }

        if len(roles) == 0 {
            return nil
        }

        roleIDs := make([]any, 0, len(roles))
        roleByID := make(map[int64]*accesscontrol.RoleDTO, len(roles))
        for _, r := range roles {
            dto := &accesscontrol.RoleDTO{
                ID:          r.ID,
                OrgID:       r.OrgID,
                Version:     r.Version,
                UID:         r.UID,
                Name:        r.Name,
                DisplayName: r.DisplayName,
                Description: r.Description,
                Group:       r.Group,
                Hidden:      r.Hidden,
                Updated:     r.Updated,
                Created:     r.Created,
            }
            out[dto.Name] = dto
            roleByID[dto.ID] = dto
            roleIDs = append(roleIDs, dto.ID)
        }

        type permRow struct {
            RoleID int64  `xorm:"role_id"`
            Action string `xorm:"action"`
            Scope  string `xorm:"scope"`
        }
        perms := []permRow{}
        if err := sess.Table("permission").In("role_id", roleIDs...).Find(&perms); err != nil {
            return err
        }

        for _, p := range perms {
            dto := roleByID[p.RoleID]
            if dto == nil {
                continue
            }
            dto.Permissions = append(dto.Permissions, accesscontrol.Permission{
                RoleID: p.RoleID,
                Action: p.Action,
                Scope:  p.Scope,
            })
        }

        return nil
    })

    return out, err
}

func (s *AccessControlStore) SetRole(ctx context.Context, existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) error {
    if existingRole == nil {
        return nil
    }

    return s.sql.WithDbSession(ctx, func(sess *db.Session) error {
        _, err := sess.Table("role").
            Where("id = ? AND org_id = ?", existingRole.ID, accesscontrol.GlobalOrgID).
            Update(map[string]any{
                "display_name": wantedRole.DisplayName,
                "description":  wantedRole.Description,
                "group_name":   wantedRole.Group,
                "hidden":       wantedRole.Hidden,
                "updated":      time.Now(),
            })
        return err
    })
}

func (s *AccessControlStore) SetPermissions(ctx context.Context, existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) error {
    if existingRole == nil {
        return nil
    }

    type key struct{ Action, Scope string }
    existing := map[key]struct{}{}
    for _, p := range existingRole.Permissions {
        existing[key{p.Action, p.Scope}] = struct{}{}
    }
    desired := map[key]struct{}{}
    for _, p := range wantedRole.Permissions {
        desired[key{p.Action, p.Scope}] = struct{}{}
    }

    toAdd := make([]accesscontrol.Permission, 0)
    toRemove := make([]accesscontrol.SeedPermission, 0)

    now := time.Now()
    for k := range desired {
        if _, ok := existing[k]; ok {
            continue
        }
        perm := accesscontrol.Permission{
            RoleID:  existingRole.ID,
            Action:  k.Action,
            Scope:   k.Scope,
            Created: now,
            Updated: now,
        }
        perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
        toAdd = append(toAdd, perm)
    }

    for k := range existing {
        if _, ok := desired[k]; ok {
            continue
        }
        toRemove = append(toRemove, accesscontrol.SeedPermission{Action: k.Action, Scope: k.Scope})
    }

    if len(toAdd) == 0 && len(toRemove) == 0 {
        return nil
    }

    return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
        if len(toRemove) > 0 {
            if err := DeleteRolePermissionTuples(sess, s.sql.GetDBType(), existingRole.ID, toRemove); err != nil {
                return err
            }
        }

        if len(toAdd) > 0 {
            _, err := sess.InsertMulti(toAdd)
            return err
        }

        return nil
    })
}

func (s *AccessControlStore) CreateRole(ctx context.Context, role accesscontrol.RoleDTO) error {
    now := time.Now()
    uid := role.UID
    if uid == "" && (strings.HasPrefix(role.Name, accesscontrol.FixedRolePrefix) || strings.HasPrefix(role.Name, accesscontrol.PluginRolePrefix)) {
        uid = accesscontrol.PrefixedRoleUID(role.Name)
    }
    r := accesscontrol.Role{
        OrgID:       accesscontrol.GlobalOrgID,
        Version:     role.Version,
        UID:         uid,
        Name:        role.Name,
        DisplayName: role.DisplayName,
        Description: role.Description,
        Group:       role.Group,
        Hidden:      role.Hidden,
        Created:     now,
        Updated:     now,
    }
    if r.Version == 0 {
        r.Version = 1
    }

    return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
        if _, err := sess.Insert(&r); err != nil {
            return err
        }

        if len(role.Permissions) == 0 {
            return nil
        }

        // De-duplicate permissions on (action, scope) to avoid unique constraint violations.
        // Some role definitions may accidentally include duplicates.
        type permKey struct{ Action, Scope string }
        seen := make(map[permKey]struct{}, len(role.Permissions))

        perms := make([]accesscontrol.Permission, 0, len(role.Permissions))
        for _, p := range role.Permissions {
            k := permKey{Action: p.Action, Scope: p.Scope}
            if _, ok := seen[k]; ok {
                continue
            }
            seen[k] = struct{}{}

            perm := accesscontrol.Permission{
                RoleID:  r.ID,
                Action:  p.Action,
                Scope:   p.Scope,
                Created: now,
                Updated: now,
            }
            perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
            perms = append(perms, perm)
        }
        _, err := sess.InsertMulti(perms)
        return err
    })
}

func (s *AccessControlStore) DeleteRoles(ctx context.Context, roleUIDs []string) error {
    if len(roleUIDs) == 0 {
        return nil
    }

    uids := make([]any, 0, len(roleUIDs))
    for _, uid := range roleUIDs {
        uids = append(uids, uid)
    }

    return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
        type row struct {
            ID  int64  `xorm:"id"`
            UID string `xorm:"uid"`
        }
        rows := []row{}
        if err := sess.Table("role").
            Where("org_id = ?", accesscontrol.GlobalOrgID).
            In("uid", uids...).
            Find(&rows); err != nil {
            return err
        }
        if len(rows) == 0 {
            return nil
        }

        roleIDs := make([]any, 0, len(rows))
        for _, r := range rows {
            roleIDs = append(roleIDs, r.ID)
        }

        // Remove permissions and assignments first to avoid FK issues (if enabled).
        {
            args := append([]any{"DELETE FROM permission WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
            if _, err := sess.Exec(args...); err != nil {
                return err
            }
        }
        {
            args := append([]any{"DELETE FROM user_role WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
            if _, err := sess.Exec(args...); err != nil {
                return err
            }
        }
        {
            args := append([]any{"DELETE FROM team_role WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
            if _, err := sess.Exec(args...); err != nil {
                return err
            }
        }
        {
            args := append([]any{"DELETE FROM builtin_role WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
            if _, err := sess.Exec(args...); err != nil {
                return err
            }
        }

        args := append([]any{"DELETE FROM role WHERE org_id = ? AND uid IN (?" + strings.Repeat(",?", len(uids)-1) + ")", accesscontrol.GlobalOrgID}, uids...)
        _, err := sess.Exec(args...)
        return err
    })
}

// OSS basic-role permission refresh uses seeding.Seeder.Seed() with a desired set computed in memory.
// These methods implement the permission seeding part of seeding.SeedingBackend against the current permission table.
func (s *AccessControlStore) LoadPrevious(ctx context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
    var out map[accesscontrol.SeedPermission]struct{}
    err := s.sql.WithDbSession(ctx, func(sess *db.Session) error {
        rows, err := LoadBasicRoleSeedPermissions(sess)
        if err != nil {
            return err
        }

        out = make(map[accesscontrol.SeedPermission]struct{}, len(rows))
        for _, r := range rows {
            r.Origin = ""
            out[r] = struct{}{}
        }
        return nil
    })
    return out, err
}

func (s *AccessControlStore) Apply(ctx context.Context, added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) error {
    rolesToUpgrade := seeding.RolesToUpgrade(added, removed)

    // Run the same OSS apply logic as ossBasicRoleSeedBackend.Apply inside a single transaction.
    return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
        defs := accesscontrol.BuildBasicRoleDefinitions()
        builtinToRoleID, err := EnsureBasicRolesExist(sess, defs)
        if err != nil {
            return err
        }

        backend := &ossBasicRoleSeedBackend{
            sess:            sess,
            now:             time.Now(),
            builtinToRoleID: builtinToRoleID,
            desired:         nil,
            dbType:          s.sql.GetDBType(),
        }
        if err := backend.Apply(ctx, added, removed, updated); err != nil {
            return err
        }

        return BumpBasicRoleVersions(sess, rolesToUpgrade)
    })
}

// EnsureBasicRolesExist ensures the built-in basic roles exist in the role table and are bound in builtin_role.
// It returns a mapping from builtin role name (for example "Admin") to role ID.
func EnsureBasicRolesExist(sess *db.Session, defs map[string]*accesscontrol.RoleDTO) (map[string]int64, error) {
    uidToBuiltin := make(map[string]string, len(defs))
    uids := make([]any, 0, len(defs))
    for builtin, def := range defs {
        uidToBuiltin[def.UID] = builtin
        uids = append(uids, def.UID)
    }

    type roleRow struct {
        ID  int64  `xorm:"id"`
        UID string `xorm:"uid"`
    }

    rows := []roleRow{}
    if err := sess.Table("role").
        Where("org_id = ?", accesscontrol.GlobalOrgID).
        In("uid", uids...).
        Find(&rows); err != nil {
        return nil, err
    }

    ts := time.Now()

    builtinToRoleID := make(map[string]int64, len(defs))
    for _, r := range rows {
        br, ok := uidToBuiltin[r.UID]
        if !ok {
            continue
        }
        builtinToRoleID[br] = r.ID
    }

    for builtin, def := range defs {
        roleID, ok := builtinToRoleID[builtin]
        if !ok {
            role := accesscontrol.Role{
                OrgID:       def.OrgID,
                Version:     def.Version,
                UID:         def.UID,
                Name:        def.Name,
                DisplayName: def.DisplayName,
                Description: def.Description,
                Group:       def.Group,
                Hidden:      def.Hidden,
                Created:     ts,
                Updated:     ts,
            }
            if _, err := sess.Insert(&role); err != nil {
                return nil, err
            }
            roleID = role.ID
            builtinToRoleID[builtin] = roleID
        }

        has, err := sess.Table("builtin_role").
            Where("role_id = ? AND role = ? AND org_id = ?", roleID, builtin, accesscontrol.GlobalOrgID).
            Exist()
        if err != nil {
            return nil, err
        }
        if !has {
            br := accesscontrol.BuiltinRole{
                RoleID:  roleID,
                OrgID:   accesscontrol.GlobalOrgID,
                Role:    builtin,
                Created: ts,
                Updated: ts,
            }
            if _, err := sess.Table("builtin_role").Insert(&br); err != nil {
                return nil, err
            }
        }
    }

    return builtinToRoleID, nil
}

// DeleteRolePermissionTuples deletes permissions for a single role by (action, scope) pairs.
//
// It uses a row-constructor IN clause where supported (MySQL, Postgres, SQLite) and falls back
// to a WHERE ... OR ... form for MSSQL.
func DeleteRolePermissionTuples(sess *db.Session, dbType core.DbType, roleID int64, perms []accesscontrol.SeedPermission) error {
    if len(perms) == 0 {
        return nil
    }

    if dbType == migrator.MSSQL {
        // MSSQL doesn't support (action, scope) IN ((?,?),(?,?)) row constructors.
        where := make([]string, 0, len(perms))
        args := make([]any, 0, 1+len(perms)*2)
        args = append(args, roleID)
        for _, p := range perms {
            where = append(where, "(action = ? AND scope = ?)")
            args = append(args, p.Action, p.Scope)
        }
        _, err := sess.Exec(
            append([]any{
                "DELETE FROM permission WHERE role_id = ? AND (" + strings.Join(where, " OR ") + ")",
            }, args...)...,
        )
        return err
    }

    args := make([]any, 0, 1+len(perms)*2)
    args = append(args, roleID)
    for _, p := range perms {
        args = append(args, p.Action, p.Scope)
    }
    sql := "DELETE FROM permission WHERE role_id = ? AND (action, scope) IN (" +
        strings.Repeat("(?, ?),", len(perms)-1) + "(?, ?))"
    _, err := sess.Exec(append([]any{sql}, args...)...)
    return err
}
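
To illustrate the two predicate shapes, here is a small helper equivalent to what the non-MSSQL branch builds (illustrative only; the helper name is hypothetical):

```go
// For n = 2, the row-constructor branch produces:
//   DELETE FROM permission WHERE role_id = ? AND (action, scope) IN ((?, ?),(?, ?))
// and the MSSQL fallback produces:
//   DELETE FROM permission WHERE role_id = ? AND ((action = ? AND scope = ?) OR (action = ? AND scope = ?))
func tupleDeleteSQL(n int) string {
    return "DELETE FROM permission WHERE role_id = ? AND (action, scope) IN (" +
        strings.Repeat("(?, ?),", n-1) + "(?, ?))"
}
```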

type ossBasicRoleSeedBackend struct {
    sess            *db.Session
    now             time.Time
    builtinToRoleID map[string]int64
    desired         map[accesscontrol.SeedPermission]struct{}
    dbType          core.DbType
}

func (b *ossBasicRoleSeedBackend) LoadPrevious(_ context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
    rows, err := LoadBasicRoleSeedPermissions(b.sess)
    if err != nil {
        return nil, err
    }

    out := make(map[accesscontrol.SeedPermission]struct{}, len(rows))
    for _, r := range rows {
        // Ensure the key matches what OSS seeding uses (Origin is always empty for basic role refresh).
        r.Origin = ""
        out[r] = struct{}{}
    }
    return out, nil
}

func (b *ossBasicRoleSeedBackend) LoadDesired(_ context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
    return b.desired, nil
}

func (b *ossBasicRoleSeedBackend) Apply(_ context.Context, added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) error {
    // Delete removed permissions (this includes user-defined permissions that aren't in desired).
    if len(removed) > 0 {
        permsByRoleID := map[int64][]accesscontrol.SeedPermission{}
        for _, p := range removed {
            roleID, ok := b.builtinToRoleID[p.BuiltInRole]
            if !ok {
                continue
            }
            permsByRoleID[roleID] = append(permsByRoleID[roleID], p)
        }

        for roleID, perms := range permsByRoleID {
            // Chunk to keep statement sizes and parameter counts bounded.
            if err := batch(len(perms), basicRolePermBatchSize, func(start, end int) error {
                return DeleteRolePermissionTuples(b.sess, b.dbType, roleID, perms[start:end])
            }); err != nil {
                return err
            }
        }
    }

    // Insert added permissions and updated-target permissions.
    toInsertSeed := make([]accesscontrol.SeedPermission, 0, len(added)+len(updated))
    toInsertSeed = append(toInsertSeed, added...)
    for _, v := range updated {
        toInsertSeed = append(toInsertSeed, v)
    }
    if len(toInsertSeed) == 0 {
        return nil
    }

    // De-duplicate on (role_id, action, scope). This avoids unique constraint violations when:
    // - the same permission appears in both added and updated
    // - multiple plugin origins grant the same permission (Origin is not persisted in permission table)
    type permKey struct {
        RoleID int64
        Action string
        Scope  string
    }
    seen := make(map[permKey]struct{}, len(toInsertSeed))

    toInsert := make([]accesscontrol.Permission, 0, len(toInsertSeed))
    for _, p := range toInsertSeed {
        roleID, ok := b.builtinToRoleID[p.BuiltInRole]
        if !ok {
            continue
        }
        k := permKey{RoleID: roleID, Action: p.Action, Scope: p.Scope}
        if _, ok := seen[k]; ok {
            continue
        }
        seen[k] = struct{}{}

        perm := accesscontrol.Permission{
            RoleID:  roleID,
            Action:  p.Action,
            Scope:   p.Scope,
            Created: b.now,
            Updated: b.now,
        }
        perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
        toInsert = append(toInsert, perm)
    }

    return batch(len(toInsert), basicRolePermBatchSize, func(start, end int) error {
        // MySQL: ignore conflicts to make seeding idempotent under retries/concurrency.
        // Conflicts can happen if the same permission already exists (unique on role_id, action, scope).
        if b.dbType == migrator.MySQL {
            args := make([]any, 0, (end-start)*8)
            for i := start; i < end; i++ {
                p := toInsert[i]
                args = append(args, p.RoleID, p.Action, p.Scope, p.Kind, p.Attribute, p.Identifier, p.Updated, p.Created)
            }
            sql := append([]any{`INSERT IGNORE INTO permission (role_id, action, scope, kind, attribute, identifier, updated, created) VALUES ` +
                strings.Repeat("(?, ?, ?, ?, ?, ?, ?, ?),", end-start-1) + "(?, ?, ?, ?, ?, ?, ?, ?)"}, args...)
            _, err := b.sess.Exec(sql...)
            return err
        }

        _, err := b.sess.InsertMulti(toInsert[start:end])
        return err
    })
}

func batch(count, size int, eachFn func(start, end int) error) error {
    for i := 0; i < count; {
        end := i + size
        if end > count {
            end = count
        }
        if err := eachFn(i, end); err != nil {
            return err
        }
        i = end
    }
    return nil
}
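
`batch` walks `[0, count)` in `size`-wide windows so callers can cap per-statement parameter counts; a usage sketch with a hypothetical `rows` slice:

```go
// With len(rows) == 1200 and a batch size of 500, eachFn runs with the
// windows (0, 500), (500, 1000), and (1000, 1200).
err := batch(len(rows), basicRolePermBatchSize, func(start, end int) error {
    _, err := sess.InsertMulti(rows[start:end])
    return err
})
```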

// BumpBasicRoleVersions increments the role version for the given builtin basic roles (Viewer/Editor/Admin/Grafana Admin).
// Unknown role names are ignored.
func BumpBasicRoleVersions(sess *db.Session, basicRoles []string) error {
    if len(basicRoles) == 0 {
        return nil
    }

    defs := accesscontrol.BuildBasicRoleDefinitions()
    uids := make([]any, 0, len(basicRoles))
    for _, br := range basicRoles {
        def, ok := defs[br]
        if !ok {
            continue
        }
        uids = append(uids, def.UID)
    }
    if len(uids) == 0 {
        return nil
    }

    sql := "UPDATE role SET version = version + 1 WHERE org_id = ? AND uid IN (?" + strings.Repeat(",?", len(uids)-1) + ")"
    _, err := sess.Exec(append([]any{sql, accesscontrol.GlobalOrgID}, uids...)...)
    return err
}

// LoadBasicRoleSeedPermissions returns the current (builtin_role, action, scope) permissions granted to basic roles.
// It sets Origin to empty.
func LoadBasicRoleSeedPermissions(sess *db.Session) ([]accesscontrol.SeedPermission, error) {
    rows := []accesscontrol.SeedPermission{}
    err := sess.SQL(
        `SELECT role.display_name AS builtin_role, p.action, p.scope, '' AS origin
        FROM role INNER JOIN permission AS p ON p.role_id = role.id
        WHERE role.org_id = ? AND role.name LIKE 'basic:%'`,
        accesscontrol.GlobalOrgID,
    ).Find(&rows)
    return rows, err
}

@@ -15,7 +15,6 @@ import (
    "github.com/grafana/grafana/pkg/infra/db"
    "github.com/grafana/grafana/pkg/infra/log"
    "github.com/grafana/grafana/pkg/infra/serverlock"
    "github.com/grafana/grafana/pkg/services/accesscontrol"
    "github.com/grafana/grafana/pkg/services/authz/zanzana"
    "github.com/grafana/grafana/pkg/services/featuremgmt"
    "github.com/grafana/grafana/pkg/services/folder"
@@ -131,9 +130,6 @@ func (r *ZanzanaReconciler) Run(ctx context.Context) error {
// Reconcile schedules a job that will run and reconcile resources between
// legacy access control and zanzana.
func (r *ZanzanaReconciler) Reconcile(ctx context.Context) error {
    // Ensure we don't reconcile an empty/partial RBAC state before OSS has seeded basic role permissions.
    // This matters most during startup where fixed-role loading + basic-role permission refresh runs as another background service.
    r.waitForBasicRolesSeeded(ctx)
    r.reconcile(ctx)

    // FIXME:
@@ -149,57 +145,6 @@ func (r *ZanzanaReconciler) Reconcile(ctx context.Context) error {
    }
}

func (r *ZanzanaReconciler) hasBasicRolePermissions(ctx context.Context) bool {
    var count int64
    // Basic role permissions are stored on "basic:%" roles in the global org (0).
    // In a fresh DB, this will be empty until fixed roles are registered and the basic role permission refresh runs.
    type row struct {
        Count int64 `xorm:"count"`
    }
    _ = r.store.WithDbSession(ctx, func(sess *db.Session) error {
        var rr row
        _, err := sess.SQL(
            `SELECT COUNT(*) AS count
            FROM role INNER JOIN permission AS p ON p.role_id = role.id
            WHERE role.org_id = ? AND role.name LIKE ?`,
            accesscontrol.GlobalOrgID,
            accesscontrol.BasicRolePrefix+"%",
        ).Get(&rr)
        if err != nil {
            return err
        }
        count = rr.Count
        return nil
    })
    return count > 0
}

func (r *ZanzanaReconciler) waitForBasicRolesSeeded(ctx context.Context) {
    // Best-effort: don't block forever. If we can't observe basic roles, proceed anyway.
    const (
        maxWait  = 15 * time.Second
        interval = 1 * time.Second
    )

    deadline := time.NewTimer(maxWait)
    defer deadline.Stop()
    ticker := time.NewTicker(interval)
    defer ticker.Stop()

    for {
        if r.hasBasicRolePermissions(ctx) {
            return
        }
        select {
        case <-ctx.Done():
            return
        case <-deadline.C:
            return
        case <-ticker.C:
        }
    }
}

func (r *ZanzanaReconciler) reconcile(ctx context.Context) {
    run := func(ctx context.Context, namespace string) (ok bool) {
        now := time.Now()

@@ -1,67 +0,0 @@
package dualwrite

import (
    "context"
    "testing"
    "time"

    "github.com/stretchr/testify/require"

    "github.com/grafana/grafana/pkg/infra/db"
    "github.com/grafana/grafana/pkg/services/accesscontrol"
)

func TestZanzanaReconciler_hasBasicRolePermissions(t *testing.T) {
    env := setupTestEnv(t)

    r := &ZanzanaReconciler{
        store: env.db,
    }

    ctx := context.Background()
    require.False(t, r.hasBasicRolePermissions(ctx))

    err := env.db.WithDbSession(ctx, func(sess *db.Session) error {
        now := time.Now()

        _, err := sess.Exec(
            `INSERT INTO role (org_id, uid, name, display_name, group_name, description, hidden, version, created, updated)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
            accesscontrol.GlobalOrgID,
            "basic_viewer_uid_test",
            accesscontrol.BasicRolePrefix+"viewer",
            "Viewer",
            "Basic",
            "Viewer role",
            false,
            1,
            now,
            now,
        )
        if err != nil {
            return err
        }

        var roleID int64
        if _, err := sess.SQL(`SELECT id FROM role WHERE org_id = ? AND uid = ?`, accesscontrol.GlobalOrgID, "basic_viewer_uid_test").Get(&roleID); err != nil {
            return err
        }

        _, err = sess.Exec(
            `INSERT INTO permission (role_id, action, scope, kind, attribute, identifier, created, updated)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
            roleID,
            "dashboards:read",
            "dashboards:*",
            "",
            "",
            "",
            now,
            now,
        )
        return err
    })
    require.NoError(t, err)

    require.True(t, r.hasBasicRolePermissions(ctx))
}

@@ -1,7 +1,6 @@
package accesscontrol

import (
    "context"
    "encoding/json"
    "errors"
    "fmt"
@@ -595,18 +594,3 @@ type QueryWithOrg struct {
    OrgId  *int64 `json:"orgId"`
    Global bool   `json:"global"`
}

type SeedPermission struct {
    BuiltInRole string `xorm:"builtin_role"`
    Action      string `xorm:"action"`
    Scope       string `xorm:"scope"`
    Origin      string `xorm:"origin"`
}

type RoleStore interface {
    LoadRoles(ctx context.Context) (map[string]*RoleDTO, error)
    SetRole(ctx context.Context, existingRole *RoleDTO, wantedRole RoleDTO) error
    SetPermissions(ctx context.Context, existingRole *RoleDTO, wantedRole RoleDTO) error
    CreateRole(ctx context.Context, role RoleDTO) error
    DeleteRoles(ctx context.Context, roleUIDs []string) error
}

@@ -1,451 +0,0 @@
package seeding

import (
    "context"
    "fmt"
    "regexp"
    "slices"
    "strings"

    "github.com/grafana/grafana/pkg/infra/log"
    "github.com/grafana/grafana/pkg/services/accesscontrol"
    "github.com/grafana/grafana/pkg/services/accesscontrol/pluginutils"
    "github.com/grafana/grafana/pkg/services/pluginsintegration/pluginaccesscontrol"
)

type Seeder struct {
    log                 log.Logger
    roleStore           accesscontrol.RoleStore
    backend             SeedingBackend
    builtinsPermissions map[accesscontrol.SeedPermission]struct{}
    seededFixedRoles    map[string]bool
    seededPluginRoles   map[string]bool
    seededPlugins       map[string]bool
    hasSeededAlready    bool
}

// SeedingBackend provides the seed-set specific operations needed to seed.
type SeedingBackend interface {
    // LoadPrevious returns the currently stored permissions for previously seeded roles.
    LoadPrevious(ctx context.Context) (map[accesscontrol.SeedPermission]struct{}, error)

    // Apply updates the database to match the desired permissions.
    Apply(ctx context.Context,
        added, removed []accesscontrol.SeedPermission,
        updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission,
    ) error
}

func New(log log.Logger, roleStore accesscontrol.RoleStore, backend SeedingBackend) *Seeder {
    return &Seeder{
        log:                 log,
        roleStore:           roleStore,
        backend:             backend,
        builtinsPermissions: map[accesscontrol.SeedPermission]struct{}{},
        seededFixedRoles:    map[string]bool{},
        seededPluginRoles:   map[string]bool{},
        seededPlugins:       map[string]bool{},
        hasSeededAlready:    false,
    }
}

// SetDesiredPermissions replaces the in-memory desired permission set used by Seed().
func (s *Seeder) SetDesiredPermissions(desired map[accesscontrol.SeedPermission]struct{}) {
    if desired == nil {
        s.builtinsPermissions = map[accesscontrol.SeedPermission]struct{}{}
        return
    }
    s.builtinsPermissions = desired
}

// Seed loads current and desired permissions, diffs them (including scope updates), applies changes, and bumps versions.
func (s *Seeder) Seed(ctx context.Context) error {
    previous, err := s.backend.LoadPrevious(ctx)
    if err != nil {
        return err
    }

    // - Do not remove plugin permissions when the plugin didn't register this run (Origin set but not in seededPlugins).
    // - Preserve legacy plugin app access permissions in the persisted seed set (these are granted by default).
    if len(previous) > 0 {
        filtered := make(map[accesscontrol.SeedPermission]struct{}, len(previous))
        for p := range previous {
            if p.Action == pluginaccesscontrol.ActionAppAccess {
                continue
            }
            if p.Origin != "" && !s.seededPlugins[p.Origin] {
                continue
            }
            filtered[p] = struct{}{}
        }
        previous = filtered
    }

    added, removed, updated := s.permissionDiff(previous, s.builtinsPermissions)

    if err := s.backend.Apply(ctx, added, removed, updated); err != nil {
        return err
    }
    return nil
}

// SeedRoles populates the database with the roles and their assignments.
// It will create roles that do not exist and update roles that have changed.
// Do not use for provisioning. Validation is not enforced.
func (s *Seeder) SeedRoles(ctx context.Context, registrationList []accesscontrol.RoleRegistration) error {
    roleMap, err := s.roleStore.LoadRoles(ctx)
    if err != nil {
        return err
    }

    missingRoles := make([]accesscontrol.RoleRegistration, 0, len(registrationList))

    // Diff existing roles with the ones we want to seed.
    // If a role is missing, we add it to the missingRoles list.
    for _, registration := range registrationList {
        registration := registration
        role, ok := roleMap[registration.Role.Name]
        switch {
        case registration.Role.IsFixed():
            s.seededFixedRoles[registration.Role.Name] = true
        case registration.Role.IsPlugin():
            s.seededPluginRoles[registration.Role.Name] = true
            // To be resilient to failed plugin loadings, we remember the plugins that have registered;
            // later we'll ignore permissions and roles of other plugins.
            s.seededPlugins[pluginutils.PluginIDFromName(registration.Role.Name)] = true
        }

        s.rememberPermissionAssignments(&registration.Role, registration.Grants, registration.Exclude)

        if !ok {
            missingRoles = append(missingRoles, registration)
            continue
        }

        if needsRoleUpdate(role, registration.Role) {
            if err := s.roleStore.SetRole(ctx, role, registration.Role); err != nil {
                return err
            }
        }

        if needsPermissionsUpdate(role, registration.Role) {
            if err := s.roleStore.SetPermissions(ctx, role, registration.Role); err != nil {
                return err
            }
        }
    }

    for _, registration := range missingRoles {
        if err := s.roleStore.CreateRole(ctx, registration.Role); err != nil {
            return err
        }
    }

    return nil
}

func needsPermissionsUpdate(existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) bool {
    if existingRole == nil {
        return true
    }

    if len(existingRole.Permissions) != len(wantedRole.Permissions) {
        return true
    }

    for _, p := range wantedRole.Permissions {
        found := false
        for _, ep := range existingRole.Permissions {
            if ep.Action == p.Action && ep.Scope == p.Scope {
                found = true
                break
            }
        }
        if !found {
            return true
        }
    }

    return false
}

func needsRoleUpdate(existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) bool {
    if existingRole == nil {
        return true
    }

    if existingRole.Name != wantedRole.Name {
        return false
    }

    if existingRole.DisplayName != wantedRole.DisplayName {
        return true
    }

    if existingRole.Description != wantedRole.Description {
        return true
    }

    if existingRole.Group != wantedRole.Group {
        return true
    }

    if existingRole.Hidden != wantedRole.Hidden {
        return true
    }

    return false
}

// Deprecated: SeedRole is deprecated and should not be used.
// SeedRoles only does boot-up seeding and should not be used for runtime seeding.
func (s *Seeder) SeedRole(ctx context.Context, role accesscontrol.RoleDTO, builtInRoles []string) error {
    addedPermissions := make(map[string]struct{}, len(role.Permissions))
    permissions := make([]accesscontrol.Permission, 0, len(role.Permissions))
    for _, p := range role.Permissions {
        key := fmt.Sprintf("%s:%s", p.Action, p.Scope)
        if _, ok := addedPermissions[key]; !ok {
            addedPermissions[key] = struct{}{}
            permissions = append(permissions, accesscontrol.Permission{Action: p.Action, Scope: p.Scope})
        }
    }

    wantedRole := accesscontrol.RoleDTO{
        OrgID:       accesscontrol.GlobalOrgID,
        Version:     role.Version,
        UID:         role.UID,
        Name:        role.Name,
        DisplayName: role.DisplayName,
        Description: role.Description,
        Group:       role.Group,
        Permissions: permissions,
        Hidden:      role.Hidden,
    }
    roleMap, err := s.roleStore.LoadRoles(ctx)
    if err != nil {
        return err
    }

    existingRole := roleMap[wantedRole.Name]
    if existingRole == nil {
        if err := s.roleStore.CreateRole(ctx, wantedRole); err != nil {
            return err
        }
    } else {
        if needsRoleUpdate(existingRole, wantedRole) {
            if err := s.roleStore.SetRole(ctx, existingRole, wantedRole); err != nil {
                return err
            }
        }
        if needsPermissionsUpdate(existingRole, wantedRole) {
            if err := s.roleStore.SetPermissions(ctx, existingRole, wantedRole); err != nil {
                return err
            }
        }
    }

    // Remember seeded roles
    if wantedRole.IsFixed() {
        s.seededFixedRoles[wantedRole.Name] = true
    }
    isPluginRole := wantedRole.IsPlugin()
    if isPluginRole {
        s.seededPluginRoles[wantedRole.Name] = true

        // To be resilient to failed plugin loadings, we remember the plugins that have registered;
        // later we'll ignore permissions and roles of other plugins.
        s.seededPlugins[pluginutils.PluginIDFromName(role.Name)] = true
    }

    s.rememberPermissionAssignments(&wantedRole, builtInRoles, []string{})
    return nil
}

func (s *Seeder) rememberPermissionAssignments(role *accesscontrol.RoleDTO, builtInRoles []string, excludedRoles []string) {
    AppendDesiredPermissions(s.builtinsPermissions, s.log, role, builtInRoles, excludedRoles, true)
}

// AppendDesiredPermissions accumulates permissions from a role registration onto basic roles (Viewer/Editor/Admin/Grafana Admin).
// - It expands parents via accesscontrol.BuiltInRolesWithParents.
// - It can optionally ignore plugin app access permissions (which are granted by default).
func AppendDesiredPermissions(
    out map[accesscontrol.SeedPermission]struct{},
    logger log.Logger,
    role *accesscontrol.RoleDTO,
    builtInRoles []string,
    excludedRoles []string,
    ignorePluginAppAccess bool,
) {
    if out == nil || role == nil {
        return
    }

    for builtInRole := range accesscontrol.BuiltInRolesWithParents(builtInRoles) {
        // Skip excluded grants
        if slices.Contains(excludedRoles, builtInRole) {
            continue
        }

        for _, perm := range role.Permissions {
            if ignorePluginAppAccess && perm.Action == pluginaccesscontrol.ActionAppAccess {
                logger.Debug("Role is attempting to grant access permission, but this permission is already granted by default and will be ignored",
                    "role", role.Name, "permission", perm.Action, "scope", perm.Scope)
                continue
            }

            sp := accesscontrol.SeedPermission{
                BuiltInRole: builtInRole,
                Action:      perm.Action,
                Scope:       perm.Scope,
            }

            if role.IsPlugin() {
                sp.Origin = pluginutils.PluginIDFromName(role.Name)
            }

            out[sp] = struct{}{}
        }
    }
}
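
A sketch of the fan-out, assuming the usual basic-role hierarchy in which Editor and Admin inherit Viewer grants; the role name and action are hypothetical, and `logger` stands for any `log.Logger`:

```go
desired := map[accesscontrol.SeedPermission]struct{}{}
role := &accesscontrol.RoleDTO{
    Name:        "fixed:test:role",
    Permissions: []accesscontrol.Permission{{Action: "test:read"}},
}
// A single Viewer grant expands via BuiltInRolesWithParents, so desired ends
// up with one SeedPermission per inheriting basic role.
AppendDesiredPermissions(desired, logger, role, []string{"Viewer"}, nil, true)
```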
|
||||
|
||||
// permissionDiff returns:
|
||||
// - added: present in desired permissions, not in previous permissions
|
||||
// - removed: present in previous permissions, not in desired permissions
|
||||
// - updated: same role + action, but scope changed
|
||||
func (s *Seeder) permissionDiff(previous, desired map[accesscontrol.SeedPermission]struct{}) (added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) {
|
||||
addedSet := make(map[accesscontrol.SeedPermission]struct{}, 0)
|
||||
for n := range desired {
|
||||
if _, already := previous[n]; !already {
|
||||
addedSet[n] = struct{}{}
|
||||
} else {
|
||||
delete(previous, n)
|
||||
}
|
||||
}
|
||||
|
||||
// Check if any of the new permissions is actually an old permission with an updated scope
|
||||
updated = make(map[accesscontrol.SeedPermission]accesscontrol.SeedPermission, 0)
|
||||
for n := range addedSet {
|
||||
for p := range previous {
|
||||
if n.BuiltInRole == p.BuiltInRole && n.Action == p.Action {
|
||||
updated[p] = n
|
||||
delete(addedSet, n)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for p := range addedSet {
|
||||
added = append(added, p)
|
||||
}
|
||||
|
||||
for p := range previous {
|
||||
if p.Action == pluginaccesscontrol.ActionAppAccess &&
|
||||
p.Scope != pluginaccesscontrol.ScopeProvider.GetResourceAllScope() {
|
||||
// Allows backward compatibility with plugins that have been seeded before the grant ignore rule was added
|
||||
s.log.Info("This permission already existed so it will not be removed",
|
||||
"role", p.BuiltInRole, "permission", p.Action, "scope", p.Scope)
|
||||
continue
|
||||
}
|
||||
|
||||
removed = append(removed, p)
|
||||
}
|
||||
|
||||
return added, removed, updated
|
||||
}
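
// Illustrative sketch of permissionDiff's semantics (not part of the original change): with
//   previous = { {Viewer, dashboards:read, scopeA} }
//   desired  = { {Viewer, dashboards:read, scopeB}, {Editor, folders:read} }
// it reports {Viewer, dashboards:read} as updated (scopeA -> scopeB) and {Editor, folders:read}
// as added. Note that matching entries are consumed from `previous` as a side effect, so the
// caller should not reuse that map afterwards.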

func (s *Seeder) ClearBasicRolesPluginPermissions(ID string) {
	removable := []accesscontrol.SeedPermission{}

	for key := range s.builtinsPermissions {
		if matchPermissionByPluginID(key, ID) {
			removable = append(removable, key)
		}
	}

	for _, perm := range removable {
		delete(s.builtinsPermissions, perm)
	}
}

func matchPermissionByPluginID(perm accesscontrol.SeedPermission, pluginID string) bool {
	if perm.Origin != pluginID {
		return false
	}
	actionTemplate := regexp.MustCompile(fmt.Sprintf("%s[.:]", pluginID))
	scopeTemplate := fmt.Sprintf(":%s", pluginID)
	return actionTemplate.MatchString(perm.Action) || strings.HasSuffix(perm.Scope, scopeTemplate)
}
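
// For example (illustrative, assuming a plugin ID of "example-app"): once Origin matches,
// an action such as "example-app.settings:read" matches the "example-app[.:]" pattern, and
// a scope ending in ":example-app" matches the suffix check, so either condition is enough.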

// RolesToUpgrade returns the unique basic roles that should have their version incremented.
func RolesToUpgrade(added, removed []accesscontrol.SeedPermission) []string {
	set := map[string]struct{}{}
	for _, p := range added {
		set[p.BuiltInRole] = struct{}{}
	}
	for _, p := range removed {
		set[p.BuiltInRole] = struct{}{}
	}
	out := make([]string, 0, len(set))
	for r := range set {
		out = append(out, r)
	}
	return out
}

func (s *Seeder) ClearPluginRoles(ID string) {
	expectedPrefix := fmt.Sprintf("%s%s:", accesscontrol.PluginRolePrefix, ID)

	for roleName := range s.seededPluginRoles {
		if strings.HasPrefix(roleName, expectedPrefix) {
			delete(s.seededPluginRoles, roleName)
		}
	}
}

func (s *Seeder) MarkSeededAlready() {
	s.hasSeededAlready = true
}

func (s *Seeder) HasSeededAlready() bool {
	return s.hasSeededAlready
}

func (s *Seeder) RemoveAbsentRoles(ctx context.Context) error {
	roleMap, errGet := s.roleStore.LoadRoles(ctx)
	if errGet != nil {
		s.log.Error("failed to get fixed roles from store", "err", errGet)
		return errGet
	}

	toRemove := []string{}
	for _, r := range roleMap {
		if r == nil {
			continue
		}
		if r.IsFixed() {
			if !s.seededFixedRoles[r.Name] {
				s.log.Info("role is not seeded anymore, marking it for deletion", "role", r.Name)
				toRemove = append(toRemove, r.UID)
			}
			continue
		}

		if r.IsPlugin() {
			if !s.seededPlugins[pluginutils.PluginIDFromName(r.Name)] {
				// To be resilient to failed plugin loadings,
				// ignore stored roles related to plugins that have not registered this time
				s.log.Debug("plugin role has not been registered on this run, skipping its removal", "role", r.Name)
				continue
			}
			if !s.seededPluginRoles[r.Name] {
				s.log.Info("role is not seeded anymore, marking it for deletion", "role", r.Name)
				toRemove = append(toRemove, r.UID)
			}
		}
	}

	if errDelete := s.roleStore.DeleteRoles(ctx, toRemove); errDelete != nil {
		s.log.Error("failed to delete absent fixed and plugin roles", "err", errDelete)
		return errDelete
	}
	return nil
}

@@ -294,6 +294,9 @@ type DashboardProvisioning struct {
	ExternalID string `xorm:"external_id"`
	CheckSum   string
	Updated    int64

	// note: only used when writing metadata to unified storage resources - not saved in the legacy table.
	AllowUIUpdates bool `xorm:"-"`
}

type DeleteDashboardCommand struct {

@@ -1942,6 +1942,7 @@ func (dr *DashboardServiceImpl) saveProvisionedDashboardThroughK8s(ctx context.C
	// HOWEVER, maybe OK to leave this for now and "fix" it by using file provisioning for mode 4
	m.Kind = utils.ManagerKindClassicFP // nolint:staticcheck
	m.Identity = provisioning.Name
	m.AllowsEdits = provisioning.AllowUIUpdates
	s.Path = provisioning.ExternalID
	s.Checksum = provisioning.CheckSum
	s.TimestampMillis = time.Unix(provisioning.Updated, 0).UnixMilli()

@@ -2075,6 +2075,13 @@ var (
		FrontendOnly: true,
		Owner:        grafanaDashboardsSquad,
	},
	{
		Name:         "smoothingTransformation",
		Description:  "Enables the ASAP smoothing transformation for time series data",
		Stage:        FeatureStageExperimental,
		FrontendOnly: true,
		Owner:        grafanaDataProSquad,
	},
}
)

Generated
+1
@@ -281,3 +281,4 @@ rudderstackUpgrade,experimental,@grafana/grafana-frontend-platform,false,false,t
kubernetesAlertingHistorian,experimental,@grafana/alerting-squad,false,true,false
useMTPlugins,experimental,@grafana/plugins-platform-backend,false,false,true
multiPropsVariables,experimental,@grafana/dashboards-squad,false,false,true
smoothingTransformation,experimental,@grafana/datapro,false,false,true

+13
@@ -3293,6 +3293,19 @@
      "codeowner": "@grafana/dashboards-squad"
    }
  },
  {
    "metadata": {
      "name": "smoothingTransformation",
      "resourceVersion": "1767349656275",
      "creationTimestamp": "2026-01-02T10:27:36Z"
    },
    "spec": {
      "description": "Enables the ASAP smoothing transformation for time series data",
      "stage": "experimental",
      "codeowner": "@grafana/datapro",
      "frontend": true
    }
  },
  {
    "metadata": {
      "name": "sqlExpressions",
@@ -6,7 +6,6 @@ import (
	"strconv"
	"strings"

	"github.com/grafana/grafana/pkg/middleware"
	"github.com/grafana/grafana/pkg/plugins"
	ac "github.com/grafana/grafana/pkg/services/accesscontrol"
	contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
@@ -129,10 +128,6 @@ func (s *ServiceImpl) processAppPlugin(plugin pluginstore.Plugin, c *contextmode
	}

	if include.Type == "page" {
		if !middleware.PageIsFeatureToggleEnabled(c.Req.Context(), include.Path) {
			s.log.Debug("Skipping page", "plugin", plugin.ID, "path", include.Path)
			continue
		}
		link := &navtree.NavLink{
			Text: include.Name,
			Icon: include.Icon,
@@ -358,6 +358,8 @@ func (fr *FileReader) saveDashboard(ctx context.Context, path string, folderID i
		Name:     fr.Cfg.Name,
		Updated:  resolvedFileInfo.ModTime().Unix(),
		CheckSum: jsonFile.checkSum,
		// adds `grafana.app/managerAllowsEdits` to the provisioned dashboards in unified storage; not used in legacy mode.
		AllowUIUpdates: fr.Cfg.AllowUIUpdates,
	}
	_, err := fr.dashboardProvisioningService.SaveProvisionedDashboard(ctx, dash, dp)
	if err != nil {
@@ -33,6 +33,8 @@ import (
)

func TestIntegrationFolderTreeZanzana(t *testing.T) {
	// TODO: Add back OSS seeding and enable this test
	t.Skip("Skipping folder tree test with Zanzana")
	testutil.SkipIntegrationTestInShortMode(t)

	runIntegrationFolderTree(t, testinfra.GrafanaOpts{
@@ -1,7 +1,15 @@
import { GrafanaConfig, locationUtil } from '@grafana/data';
import * as folderHooks from 'app/api/clients/folder/v1beta1/hooks';
import { backendSrv } from 'app/core/services/backend_srv';
import { AnnoKeyFolder, AnnoKeyMessage, AnnoReloadOnParamsChange } from 'app/features/apiserver/types';
import {
  AnnoKeyFolder,
  AnnoKeyManagerAllowsEdits,
  AnnoKeyManagerKind,
  AnnoKeyMessage,
  AnnoKeySourcePath,
  AnnoReloadOnParamsChange,
  ManagerKind,
} from 'app/features/apiserver/types';
import { DashboardDataDTO } from 'app/types/dashboard';

import { DashboardWithAccessInfo } from './types';
@@ -215,6 +223,63 @@ describe('v1 dashboard API', () => {
    expect(result.meta.reloadOnParamsChange).toBe(true);
  });

  describe('managed/provisioned dashboards', () => {
    it('should not mark dashboard as provisioned when manager allows UI edits', async () => {
      mockGet.mockResolvedValueOnce({
        ...mockDashboardDto,
        metadata: {
          ...mockDashboardDto.metadata,
          annotations: {
            [AnnoKeyManagerKind]: ManagerKind.Terraform,
            [AnnoKeyManagerAllowsEdits]: 'true',
            [AnnoKeySourcePath]: 'dashboards/test.json',
          },
        },
      });

      const api = new K8sDashboardAPI();
      const result = await api.getDashboardDTO('test');
      expect(result.meta.provisioned).toBe(false);
      expect(result.meta.provisionedExternalId).toBe('dashboards/test.json');
    });

    it('should mark dashboard as provisioned when manager does not allow UI edits', async () => {
      mockGet.mockResolvedValueOnce({
        ...mockDashboardDto,
        metadata: {
          ...mockDashboardDto.metadata,
          annotations: {
            [AnnoKeyManagerKind]: ManagerKind.Terraform,
            [AnnoKeySourcePath]: 'dashboards/test.json',
          },
        },
      });

      const api = new K8sDashboardAPI();
      const result = await api.getDashboardDTO('test');
      expect(result.meta.provisioned).toBe(true);
      expect(result.meta.provisionedExternalId).toBe('dashboards/test.json');
    });

    it('should not mark repository-managed dashboard as provisioned (locked)', async () => {
      mockGet.mockResolvedValueOnce({
        ...mockDashboardDto,
        metadata: {
          ...mockDashboardDto.metadata,
          annotations: {
            [AnnoKeyManagerKind]: ManagerKind.Repo,
            [AnnoKeySourcePath]: 'dashboards/test.json',
          },
        },
      });

      const api = new K8sDashboardAPI();
      const result = await api.getDashboardDTO('test');
      expect(result.meta.provisioned).toBe(false);
      expect(result.meta.provisionedExternalId).toBe('dashboards/test.json');
    });
  });

  describe('saveDashboard', () => {
    beforeEach(() => {
      locationUtil.initialize({
@@ -164,7 +164,11 @@ export class K8sDashboardAPI implements DashboardAPI<DashboardDTO, Dashboard> {
    const managerKind = annotations[AnnoKeyManagerKind];

    if (managerKind) {
      result.meta.provisioned = annotations[AnnoKeyManagerAllowsEdits] === 'true' || managerKind === ManagerKind.Repo;
      // `meta.provisioned` is used by the save/delete UI to decide if a dashboard is locked
      // (i.e. it can't be saved from the UI). This should match the legacy behavior where
      // `allowUiUpdates: true` keeps the dashboard editable/savable.
      const allowsEdits = annotations[AnnoKeyManagerAllowsEdits] === 'true';
      result.meta.provisioned = !allowsEdits && managerKind !== ManagerKind.Repo;
      result.meta.provisionedExternalId = annotations[AnnoKeySourcePath];
    }
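
    // Illustrative outcomes of the logic above (matching the tests in this change):
    // Terraform-managed + allowsEdits -> provisioned: false (still editable in the UI);
    // Terraform-managed without allowsEdits -> provisioned: true (locked);
    // repository-managed (ManagerKind.Repo) -> provisioned: false regardless of allowsEdits.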
@@ -1612,6 +1612,53 @@ ${buildImageContent(
`;
    },
  },
  smoothing: {
    name: 'Smoothing',
    getHelperDocs: function (imageRenderType: ImageRenderType = ImageRenderType.ShortcodeFigure) {
      return `
Use this transformation to reduce noise in time series data through adaptive smoothing. This transformation creates smoother, cleaner visualizations while preserving all original time points and important trends and patterns in your data.

The smoothing transformation uses the ASAP (Automatic Smoothing for Attention Prioritization) algorithm internally to generate a smoothed curve, which is then interpolated back onto all original time points. This ensures your visualization maintains continuous lines without gaps while reducing noise.

#### Available options

- **Resolution** - Controls smoothing intensity (1-1000). Lower values create more aggressive smoothing, while higher values preserve more detail. The output preserves all original time points.

#### When to use smoothing

This transformation is useful for:

- Noisy time series data that obscures underlying trends
- Clearer trend analysis and pattern recognition

#### Example

Consider noisy sensor data with thousands of points:

**Before smoothing:**

| Time                 | Temperature |
| -------------------- | ----------- |
| 2020-07-07 10:00:00  | 23.1        |
| 2020-07-07 10:00:01  | 23.3        |
| 2020-07-07 10:00:02  | 22.9        |
| 2020-07-07 10:00:03  | 23.2        |
| ... (thousands more) | ...         |

**After smoothing (Resolution: 100):**

| Time                | Temperature (smoothed) |
| ------------------- | ---------------------- |
| 2020-07-07 10:00:00 | 23.1                   |
| 2020-07-07 10:00:01 | 23.1                   |
| 2020-07-07 10:00:02 | 23.0                   |
| 2020-07-07 10:00:03 | 23.0                   |
| ... (same count)    | ...                    |

The transformation preserves all original time points while reducing noise, resulting in smoother curves that maintain continuous lines without gaps.
`;
    },
  },
};
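
// Illustrative sketch of the pipeline the smoothing docs above describe (asapSmooth and
// interpolateToTimePoints come from the transformation code added elsewhere in this change):
//
//   const points = timeValues.map((t, i) => ({ x: t, y: values[i] }));
//   const curve = asapSmooth(points, { resolution: 100 });
//   const smoothed = interpolateToTimePoints(curve, timeValues); // back onto the original timestamps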

function buildImageContent(source: string, imageRenderType: ImageRenderType, imageAltText: string) {
@@ -0,0 +1,72 @@
<svg width="114" height="48" viewBox="0 0 114 48" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_smoothing_dark)">
<path d="M0 0.611699V7.64012H7V0H0.601504C0.441975 0 0.28898 0.0644467 0.176176 0.179162C0.0633725 0.293878 0 0.449466 0 0.611699Z" fill="url(#paint0_linear_smoothing_dark)"/>
<path d="M8 0H15V7.64012H8V0Z" fill="url(#paint1_linear_smoothing_dark)"/>
<path d="M16 0H23V7.64012H16V0Z" fill="url(#paint2_linear_smoothing_dark)"/>
<path d="M24 0H31V7.64012H24V0Z" fill="url(#paint3_linear_smoothing_dark)"/>
<path d="M32 0H39V7.64012H32V0Z" fill="url(#paint4_linear_smoothing_dark)"/>
<path d="M40 7.64012H48V0.611699C48 0.449466 47.9366 0.293878 47.8238 0.179162C47.711 0.0644467 47.558 0 47.3985 0H40V7.64012Z" fill="url(#paint5_linear_smoothing_dark)"/>
<path d="M2 38L5 28L8 35L11 25L14 32L17 22L20 30L23 20L26 28L29 24L32 31L35 26L38 33L41 29L44 36L46 32" stroke="#84AFF1" stroke-width="2" fill="none" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M2 38L5 28L8 35L11 25L14 32L17 22L20 30L23 20L26 28L29 24L32 31L35 26L38 33L41 29L44 36L46 32L46 48L2 48Z" fill="url(#paint6_linear_smoothing_dark)" opacity="0.3"/>
</g>
<path d="M57.91 30C58.6 30 64 26 64 24C64 22 58.7 18 57.91 18C57.08 18 56.4 18.5 56.4 19.48C56.4 20.45 59.91 22.92 59.91 22.92C59.91 22.92 52.25 22.25 52 22.92C51.75 23.59 51.75 24.41 52 25.08C52.25 25.75 59.91 25.08 59.91 25.08C59.91 25.08 56.4 27.75 56.4 28.53C56.4 29.31 57.21 30 57.91 30Z" fill="#CCCCDC"/>
<g clip-path="url(#clip1_smoothing_dark)">
<path d="M66 0.611699V7.64012H81V0H66.6015C66.442 0 66.289 0.0644467 66.1762 0.179162C66.0634 0.293878 66 0.449466 66 0.611699Z" fill="url(#paint7_linear_smoothing_dark)"/>
<path d="M82 0H97V7.64012H82V0Z" fill="url(#paint8_linear_smoothing_dark)"/>
<path d="M98 7.64012H114V0.611699C114 0.449466 113.937 0.293878 113.824 0.179162C113.711 0.0644467 113.558 0 113.399 0H98V7.64012Z" fill="url(#paint9_linear_smoothing_dark)"/>
<path d="M68 36Q77 28 86 24Q95 21 112 25" stroke="#84AFF1" stroke-width="2.5" fill="none" stroke-linecap="round"/>
<path d="M68 36Q77 28 86 24Q95 21 112 25L112 48L68 48Z" fill="url(#paint10_linear_smoothing_dark)" opacity="0.3"/>
</g>
<defs>
<linearGradient id="paint0_linear_smoothing_dark" x1="0" y1="3.82" x2="7" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint1_linear_smoothing_dark" x1="8" y1="3.82" x2="15" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint2_linear_smoothing_dark" x1="16" y1="3.82" x2="23" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint3_linear_smoothing_dark" x1="24" y1="3.82" x2="31" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint4_linear_smoothing_dark" x1="32" y1="3.82" x2="39" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint5_linear_smoothing_dark" x1="40" y1="3.82" x2="48" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint6_linear_smoothing_dark" x1="24" y1="48" x2="24" y2="20" gradientUnits="userSpaceOnUse">
<stop stop-color="#1F60C4" stop-opacity="0"/>
<stop offset="1" stop-color="#3865AB"/>
</linearGradient>
<linearGradient id="paint7_linear_smoothing_dark" x1="66" y1="3.82" x2="81" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint8_linear_smoothing_dark" x1="82" y1="3.82" x2="97" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint9_linear_smoothing_dark" x1="98" y1="3.82" x2="114" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint10_linear_smoothing_dark" x1="90" y1="48" x2="90" y2="21" gradientUnits="userSpaceOnUse">
<stop stop-color="#1F60C4" stop-opacity="0"/>
<stop offset="1" stop-color="#3865AB"/>
</linearGradient>
<clipPath id="clip0_smoothing_dark">
<rect width="48" height="48" fill="white"/>
</clipPath>
<clipPath id="clip1_smoothing_dark">
<rect width="48" height="48" fill="white" transform="translate(66)"/>
</clipPath>
</defs>
</svg>

After Width: | Height: | Size: 4.6 KiB
@@ -0,0 +1,72 @@
<svg width="114" height="48" viewBox="0 0 114 48" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_smoothing_light)">
<path d="M0 0.611699V7.64012H7V0H0.601504C0.441975 0 0.28898 0.0644467 0.176176 0.179162C0.0633725 0.293878 0 0.449466 0 0.611699Z" fill="url(#paint0_linear_smoothing_light)"/>
<path d="M8 0H15V7.64012H8V0Z" fill="url(#paint1_linear_smoothing_light)"/>
<path d="M16 0H23V7.64012H16V0Z" fill="url(#paint2_linear_smoothing_light)"/>
<path d="M24 0H31V7.64012H24V0Z" fill="url(#paint3_linear_smoothing_light)"/>
<path d="M32 0H39V7.64012H32V0Z" fill="url(#paint4_linear_smoothing_light)"/>
<path d="M40 7.64012H48V0.611699C48 0.449466 47.9366 0.293878 47.8238 0.179162C47.711 0.0644467 47.558 0 47.3985 0H40V7.64012Z" fill="url(#paint5_linear_smoothing_light)"/>
<path d="M2 38L5 28L8 35L11 25L14 32L17 22L20 30L23 20L26 28L29 24L32 31L35 26L38 33L41 29L44 36L46 32" stroke="#84AFF1" stroke-width="2" fill="none" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M2 38L5 28L8 35L11 25L14 32L17 22L20 30L23 20L26 28L29 24L32 31L35 26L38 33L41 29L44 36L46 32L46 48L2 48Z" fill="url(#paint6_linear_smoothing_light)" opacity="0.3"/>
</g>
<path d="M57.91 30C58.6 30 64 26 64 24C64 22 58.7 18 57.91 18C57.08 18 56.4 18.5 56.4 19.48C56.4 20.45 59.91 22.92 59.91 22.92C59.91 22.92 52.25 22.25 52 22.92C51.75 23.59 51.75 24.41 52 25.08C52.25 25.75 59.91 25.08 59.91 25.08C59.91 25.08 56.4 27.75 56.4 28.53C56.4 29.31 57.21 30 57.91 30Z" fill="#24292E"/>
<g clip-path="url(#clip1_smoothing_light)">
<path d="M66 0.611699V7.64012H81V0H66.6015C66.442 0 66.289 0.0644467 66.1762 0.179162C66.0634 0.293878 66 0.449466 66 0.611699Z" fill="url(#paint7_linear_smoothing_light)"/>
<path d="M82 0H97V7.64012H82V0Z" fill="url(#paint8_linear_smoothing_light)"/>
<path d="M98 7.64012H114V0.611699C114 0.449466 113.937 0.293878 113.824 0.179162C113.711 0.0644467 113.558 0 113.399 0H98V7.64012Z" fill="url(#paint9_linear_smoothing_light)"/>
<path d="M68 36Q77 28 86 24Q95 21 112 25" stroke="#84AFF1" stroke-width="2.5" fill="none" stroke-linecap="round"/>
<path d="M68 36Q77 28 86 24Q95 21 112 25L112 48L68 48Z" fill="url(#paint10_linear_smoothing_light)" opacity="0.3"/>
</g>
<defs>
<linearGradient id="paint0_linear_smoothing_light" x1="0" y1="3.82" x2="7" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint1_linear_smoothing_light" x1="8" y1="3.82" x2="15" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint2_linear_smoothing_light" x1="16" y1="3.82" x2="23" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint3_linear_smoothing_light" x1="24" y1="3.82" x2="31" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint4_linear_smoothing_light" x1="32" y1="3.82" x2="39" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint5_linear_smoothing_light" x1="40" y1="3.82" x2="48" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint6_linear_smoothing_light" x1="24" y1="48" x2="24" y2="20" gradientUnits="userSpaceOnUse">
<stop stop-color="#1F60C4" stop-opacity="0"/>
<stop offset="1" stop-color="#3865AB"/>
</linearGradient>
<linearGradient id="paint7_linear_smoothing_light" x1="66" y1="3.82" x2="81" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint8_linear_smoothing_light" x1="82" y1="3.82" x2="97" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint9_linear_smoothing_light" x1="98" y1="3.82" x2="114" y2="3.82" gradientUnits="userSpaceOnUse">
<stop stop-color="#F2CC0C"/>
<stop offset="1" stop-color="#FF9830"/>
</linearGradient>
<linearGradient id="paint10_linear_smoothing_light" x1="90" y1="48" x2="90" y2="21" gradientUnits="userSpaceOnUse">
<stop stop-color="#1F60C4" stop-opacity="0"/>
<stop offset="1" stop-color="#3865AB"/>
</linearGradient>
<clipPath id="clip0_smoothing_light">
<rect width="48" height="48" fill="white"/>
</clipPath>
<clipPath id="clip1_smoothing_light">
<rect width="48" height="48" fill="white" transform="translate(66)"/>
</clipPath>
</defs>
</svg>

After Width: | Height: | Size: 4.6 KiB
@@ -0,0 +1,130 @@
import { asapSmooth, DataPoint, ASAPOptions } from './asap';

describe('asapSmooth', () => {
  describe('Basic functionality', () => {
    it('should return smoothed data with valid DataPoint objects', () => {
      const data: DataPoint[] = [
        { x: 0, y: 0 },
        { x: 1, y: 1 },
        { x: 2, y: 2 },
        { x: 3, y: 3 },
        { x: 4, y: 4 },
      ];

      const options: ASAPOptions = { resolution: 3 };
      const result = asapSmooth(data, options);

      expect(result.length).toBeGreaterThan(0);
      result.forEach((point) => {
        expect(point).toHaveProperty('x');
        expect(point).toHaveProperty('y');
        expect(typeof point.x).toBe('number');
        expect(typeof point.y).toBe('number');
      });
    });

    it('should maintain x-axis ordering', () => {
      const data: DataPoint[] = Array.from({ length: 20 }, (_, i) => ({
        x: i,
        y: Math.random() * 100,
      }));

      const options: ASAPOptions = { resolution: 10 };
      const result = asapSmooth(data, options);

      // check that x values are in ascending order
      for (let i = 1; i < result.length; i++) {
        expect(result[i].x).toBeGreaterThanOrEqual(result[i - 1].x);
      }
    });
  });

  describe('Edge cases', () => {
    it('should handle empty array', () => {
      const data: DataPoint[] = [];
      const options: ASAPOptions = { resolution: 10 };

      const result = asapSmooth(data, options);

      expect(result).toEqual([]);
    });

    it('should handle single data point', () => {
      const data: DataPoint[] = [{ x: 1, y: 42 }];
      const options: ASAPOptions = { resolution: 10 };

      const result = asapSmooth(data, options);

      expect(result.length).toBeGreaterThan(0);
      expect(result[0].x).toBe(1);
      expect(result[0].y).toBe(42);
    });

    it('should filter out NaN values', () => {
      const data: DataPoint[] = [
        { x: 0, y: 0 },
        { x: 1, y: NaN },
        { x: 2, y: 2 },
        { x: 3, y: NaN },
        { x: 4, y: 4 },
      ];

      const options: ASAPOptions = { resolution: 3 };
      const result = asapSmooth(data, options);

      expect(result.length).toBeGreaterThan(0);
      result.forEach((point) => {
        expect(isNaN(point.x)).toBe(false);
        expect(isNaN(point.y)).toBe(false);
      });
    });

    it('should return empty array when all values are NaN', () => {
      const data: DataPoint[] = [
        { x: 0, y: NaN },
        { x: 1, y: NaN },
        { x: 2, y: NaN },
      ];

      const options: ASAPOptions = { resolution: 3 };
      const result = asapSmooth(data, options);

      expect(result).toEqual([]);
    });

    it('should sort unsorted data', () => {
      const data: DataPoint[] = [
        { x: 3, y: 3 },
        { x: 1, y: 1 },
        { x: 4, y: 4 },
        { x: 0, y: 0 },
        { x: 2, y: 2 },
      ];

      const options: ASAPOptions = { resolution: 3 };
      const result = asapSmooth(data, options);

      expect(result.length).toBeGreaterThan(0);

      // result should be sorted by x
      for (let i = 1; i < result.length; i++) {
        expect(result[i].x).toBeGreaterThanOrEqual(result[i - 1].x);
      }
    });

    it('should handle negative values', () => {
      const data: DataPoint[] = Array.from({ length: 10 }, (_, i) => ({
        x: i,
        y: -i * 2,
      }));

      const options: ASAPOptions = { resolution: 5 };
      const result = asapSmooth(data, options);

      expect(result.length).toBeGreaterThan(0);
      result.forEach((point) => {
        expect(isFinite(point.y)).toBe(true);
      });
    });
  });
});
@@ -0,0 +1,40 @@
import { ASAP } from 'downsample';

export interface DataPoint {
  x: number;
  y: number;
}

export interface ASAPOptions {
  resolution: number;
}

export function asapSmooth(data: DataPoint[], options: ASAPOptions): DataPoint[] {
  const { resolution } = options;

  if (!data || data.length === 0) {
    return [];
  }

  // Filter invalid points and convert to tuple format for the ASAP library
  const inputData: Array<[number, number]> = data
    .filter((point) => point != null && !isNaN(point.x) && !isNaN(point.y))
    .map((point) => [point.x, point.y]);

  if (inputData.length === 0) {
    return [];
  }

  // sorting first prevents O(m×n) degradation when inputData is unsorted
  inputData.sort((a, b) => a[0] - b[0]);

  // ASAP always returns objects with x and y properties
  const smoothedData = ASAP(inputData, resolution);

  // Convert back to DataPoint format
  const result: DataPoint[] = Array.from(smoothedData).filter(
    (item): item is DataPoint => item !== null && typeof item === 'object' && 'x' in item && 'y' in item
  );

  return result;
}
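
// Example usage (illustrative, not part of the original change):
// const smoothed = asapSmooth(
//   [
//     { x: 0, y: 10 },
//     { x: 1, y: 30 },
//     { x: 2, y: 12 },
//     { x: 3, y: 28 },
//   ],
//   { resolution: 2 }
// );
// `smoothed` is a DataPoint[] sorted by x; null/NaN samples are filtered out first.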
@@ -0,0 +1,744 @@
import {
  DataFrame,
  DataTransformContext,
  FieldType,
  toDataFrame,
  TransformationApplicabilityLevels,
} from '@grafana/data';

import { calculateMaxSourcePoints, getSmoothingTransformer, SmoothingTransformerOptions } from './smoothing';

describe('Smoothing transformer', () => {
  const smoothingTransformer = getSmoothingTransformer();
  const ctx: DataTransformContext = {
    interpolate: (v: string) => v,
  };

  describe('isApplicable', () => {
    it('should return Applicable for time series frames', () => {
      const frames = [
        toDataFrame({
          name: 'time series',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15] },
          ],
        }),
      ];

      expect(smoothingTransformer.isApplicable!(frames)).toBe(TransformationApplicabilityLevels.Applicable);
    });

    it('should return NotApplicable for frames without time field', () => {
      const frames = [
        toDataFrame({
          name: 'no time field',
          fields: [
            { name: 'category', type: FieldType.string, values: ['A', 'B', 'C'] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15] },
          ],
        }),
      ];

      expect(smoothingTransformer.isApplicable!(frames)).toBe(TransformationApplicabilityLevels.NotApplicable);
    });

    it('should return Applicable if at least one frame is a time series', () => {
      const frames = [
        toDataFrame({
          name: 'not time series',
          fields: [
            { name: 'category', type: FieldType.string, values: ['A', 'B', 'C'] },
            { name: 'label', type: FieldType.string, values: ['X', 'Y', 'Z'] },
          ],
        }),
        toDataFrame({
          name: 'time series',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15] },
          ],
        }),
      ];

      expect(smoothingTransformer.isApplicable!(frames)).toBe(TransformationApplicabilityLevels.Applicable);
    });

    it('should return NotApplicable for empty data', () => {
      const frames: DataFrame[] = [];

      expect(smoothingTransformer.isApplicable!(frames)).toBe(TransformationApplicabilityLevels.NotApplicable);
    });
  });

  describe('Basic functionality', () => {
    it('should smooth time series data with default settings', () => {
      const source = [
        toDataFrame({
          name: 'test data',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000, 4000, 5000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15, 25, 18] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = {};

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);

      // first frame should be the original, unchanged
      expect(result[0].name).toBe('test data');
      expect(result[0].fields).toHaveLength(2);
      expect(result[0].fields[0].name).toBe('time');
      expect(result[0].fields[1].name).toBe('value');
      expect(result[0].fields[1].values).toEqual([10, 20, 15, 25, 18]);

      // second frame should be the smoothed version
      expect(result[1].name).toBe('Smoothed');
      expect(result[1].fields).toHaveLength(2);
      expect(result[1].fields[0].name).toBe('time');
      expect(result[1].fields[1].name).toBe('value');

      // should preserve original time points
      expect(result[1].fields[0].values).toEqual([1000, 2000, 3000, 4000, 5000]);
      // should have corresponding smoothed values
      expect(result[1].fields[1].values.length).toBe(5);
    });

    it('should handle multiple numeric fields', () => {
      const source = [
        toDataFrame({
          name: 'multi field data',
          refId: 'B',
          fields: [
            { name: 'timestamp', type: FieldType.time, values: [1000, 2000, 3000, 4000] },
            { name: 'cpu', type: FieldType.number, values: [50, 75, 60, 80] },
            { name: 'memory', type: FieldType.number, values: [40, 55, 45, 65] },
            { name: 'label', type: FieldType.string, values: ['a', 'b', 'c', 'd'] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = { resolution: 3 };

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);

      // first frame is original
      expect(result[0].name).toBe('multi field data');
      expect(result[0].fields[1].name).toBe('cpu');
      expect(result[0].fields[2].name).toBe('memory');

      // second frame is smoothed
      expect(result[1].fields).toHaveLength(4);
      expect(result[1].fields[0].name).toBe('timestamp');
      expect(result[1].fields[1].name).toBe('cpu');
      expect(result[1].fields[2].name).toBe('memory');
      expect(result[1].fields[3].name).toBe('label');

      // all numeric fields should be smoothed and preserve original time points
      expect(result[1].fields[0].values.length).toBe(4);
      expect(result[1].fields[1].values.length).toBe(4);
      expect(result[1].fields[2].values.length).toBe(4);
    });

    it('should preserve non-numeric and non-time fields', () => {
      const source = [
        toDataFrame({
          name: 'mixed data',
          refId: 'C',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15] },
            { name: 'category', type: FieldType.string, values: ['A', 'B', 'C'] },
            { name: 'active', type: FieldType.boolean, values: [true, false, true] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = { resolution: 2 };

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);

      // smoothed frame should preserve non-numeric fields
      expect(result[1].fields[2].name).toBe('category');
      expect(result[1].fields[2].type).toBe(FieldType.string);
      expect(result[1].fields[3].name).toBe('active');
      expect(result[1].fields[3].type).toBe(FieldType.boolean);
    });
  });

  describe('Configuration options', () => {
    it('should use default resolution when not specified', () => {
      const source = [
        toDataFrame({
          name: 'default test',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: Array.from({ length: 200 }, (_, i) => i * 1000) },
            { name: 'value', type: FieldType.number, values: Array.from({ length: 200 }, () => Math.random() * 100) },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = {};

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);

      // smoothed frame should preserve all original time points
      expect(result[1].fields[0].values.length).toBe(200);
      expect(result[1].fields[1].values.length).toBe(200);
    });

    it('should respect custom resolution settings', () => {
      const source = [
        toDataFrame({
          name: 'resolution test',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: Array.from({ length: 100 }, (_, i) => i * 1000) },
            { name: 'value', type: FieldType.number, values: Array.from({ length: 100 }, () => Math.random() * 100) },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = { resolution: 25 };

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);

      // smoothed frame should preserve all original time points regardless of resolution
      expect(result[1].fields[0].values.length).toBe(100);
      expect(result[1].fields[1].values.length).toBe(100);
    });

    it('should clamp resolution to minimum value', () => {
      const source = [
        toDataFrame({
          name: 'small resolution test',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000, 4000, 5000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15, 25, 18] },
          ],
        }),
      ];

      // request resolution below minimum, it should be clamped to 1
      const config: SmoothingTransformerOptions = { resolution: 2 };

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);

      // smoothed frame should preserve all original time points and clamp resolution to minimum
      expect(result[1].fields[0].values.length).toBe(5);
      expect(result[1].fields[1].values.length).toBe(5);
    });
  });

  describe('Edge cases', () => {
    it('should handle empty data frames', () => {
      const source: DataFrame[] = [];

      const config: SmoothingTransformerOptions = {};

      const result = smoothingTransformer.transformer(config, ctx)(source);

      expect(result).toEqual([]);
    });

    it('should handle frames without time fields', () => {
      const source = [
        toDataFrame({
          name: 'no time field',
          refId: 'A',
          fields: [
            { name: 'category', type: FieldType.string, values: ['A', 'B', 'C'] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = {};

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return the original frame unchanged
      expect(result).toHaveLength(1);
      expect(result[0]).toEqual(source[0]);
    });

    it('should handle frames without numeric fields', () => {
      const source = [
        toDataFrame({
          name: 'no numeric fields',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'category', type: FieldType.string, values: ['A', 'B', 'C'] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = {};

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return the original frame unchanged
      expect(result).toHaveLength(1);
      expect(result[0]).toEqual(source[0]);
    });

    it('should filter out NaN values when smoothing', () => {
      const source = [
        toDataFrame({
          name: 'data with NaN',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000, 4000, 5000] },
            { name: 'value', type: FieldType.number, values: [10, NaN, 15, 25, NaN] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = { resolution: 3 };

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);

      // smoothed frame should preserve all time points
      expect(result[1].fields[0].values.length).toBe(5);
      expect(result[1].fields[1].values.length).toBe(5);

      // all values should be interpolated from the smoothed curve (no nulls)
      const values = result[1].fields[1].values;
      values.forEach((value) => {
        expect(value).not.toBeNull();
        expect(typeof value).toBe('number');
        expect(isNaN(value)).toBe(false);
      });
    });

    it('should handle data with all NaN values', () => {
      const source = [
        toDataFrame({
          name: 'all NaN data',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'value', type: FieldType.number, values: [NaN, NaN, NaN] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = {};

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // When all values are NaN, only the original frame should be returned (no smoothed frame)
      expect(result).toHaveLength(1);
      expect(result[0].fields[1].name).toBe('value'); // No "(smoothed)" suffix
      expect(result[0].fields[1].values).toEqual([NaN, NaN, NaN]);
      expect(result[0].name).toBe('all NaN data'); // Original name preserved
    });

    it('should handle data with null values', () => {
      const source = [
        toDataFrame({
          name: 'data with nulls',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000, 4000] },
            { name: 'value', type: FieldType.number, values: [10, null, 15, 25] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = { resolution: 3 };

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);

      // smoothed frame should preserve all time points
      expect(result[1].fields[0].values.length).toBe(4);
      expect(result[1].fields[1].values.length).toBe(4);

      // all values should be interpolated (no nulls in output)
      const values = result[1].fields[1].values;
      values.forEach((value) => {
        expect(value).not.toBeNull();
        expect(typeof value).toBe('number');
        expect(isNaN(value)).toBe(false);
      });
    });

    it('should handle single data point', () => {
      const source = [
        toDataFrame({
          name: 'single point',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000] },
            { name: 'value', type: FieldType.number, values: [42] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = {};

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);

      expect(result[1].fields[0].values).toHaveLength(1);
      expect(result[1].fields[1].values).toHaveLength(1);
      expect(result[1].fields[1].values[0]).toBe(42);
    });

    it('should handle empty numeric field values', () => {
      const source = [
        toDataFrame({
          name: 'empty values',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'value', type: FieldType.number, values: [] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = {};

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return the original frame since there is no numeric data to smooth
      expect(result[0]).toEqual(source[0]);
    });
  });

  describe('Data integrity', () => {
    it('should maintain time ordering in smoothed data', () => {
      const source = [
        toDataFrame({
          name: 'ordered data',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000, 4000, 5000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15, 25, 18] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = { resolution: 4 };

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // check the smoothed frame's time values
      const timeValues = result[1].fields[0].values as number[];

      // check that time values are in ascending order
      for (let i = 1; i < timeValues.length; i++) {
        expect(timeValues[i]).toBeGreaterThanOrEqual(timeValues[i - 1]);
      }
    });

    it('should preserve original frame metadata', () => {
      const source = [
        toDataFrame({
          name: 'original name',
          refId: 'TEST',
          meta: { custom: { test: 'value' } },
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = {};

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);

      // original frame unchanged
      expect(result[0].refId).toBe('TEST');
      expect(result[0].meta).toEqual(source[0].meta);
      expect(result[0].name).toBe('original name');

      // smoothed frame preserves metadata
      expect(result[1].refId).toBe('TEST');
      expect(result[1].meta).toEqual(source[0].meta);
      expect(result[1].name).toBe('Smoothed');
    });

    it('should handle frames with no name', () => {
      const source = [
        toDataFrame({
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = {};

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);
      expect(result[1].name).toBe('Smoothed');
    });
  });

  describe('Real-world scenarios', () => {
    it('should handle sparse data with irregular intervals', () => {
      // based on real user data with ~10 points over 30 minutes
      const source = [
        toDataFrame({
          name: 'temperature',
          refId: 'A',
          fields: [
            {
              name: 'time',
              type: FieldType.time,
              values: [
                1733999700000, 1733999790000, 1734000000000, 1734000210000, 1734000420000, 1734000630000, 1734000840000,
                1734001050000, 1734001260000, 1734001470000,
              ],
            },
            {
              name: 'value',
              type: FieldType.number,
              values: [31.1, 31.1, 30.2, 30.8, 29.8, 30.0, 29.3, 28.6, 29.6, 30.5],
            },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = { resolution: 20 };

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return both original and smoothed frames
      expect(result).toHaveLength(2);
      expect(result[1].fields[0].values.length).toBe(10);
      expect(result[1].fields[1].values.length).toBe(10);

      // all values should be non-null numbers
      const values = result[1].fields[1].values;
      values.forEach((value) => {
        expect(value).not.toBeNull();
        expect(typeof value).toBe('number');
        expect(isNaN(value)).toBe(false);
      });
    });
  });

  describe('Multiple frames', () => {
    it('should process multiple frames independently', () => {
      const source = [
        toDataFrame({
          name: 'frame1',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15] },
          ],
        }),
        toDataFrame({
          name: 'frame2',
          refId: 'B',
          fields: [
            { name: 'timestamp', type: FieldType.time, values: [4000, 5000, 6000] },
            { name: 'metric', type: FieldType.number, values: [30, 40, 35] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = { resolution: 2 };

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return original frames + smoothed frames (2 original + 2 smoothed = 4 total)
      expect(result).toHaveLength(4);

      // original frames first
      expect(result[0].name).toBe('frame1');
      expect(result[0].refId).toBe('A');
      expect(result[1].name).toBe('frame2');
      expect(result[1].refId).toBe('B');

      // smoothed frames after
      expect(result[2].name).toBe('Smoothed');
      expect(result[2].refId).toBe('A');
      expect(result[3].name).toBe('Smoothed');
      expect(result[3].refId).toBe('B');
    });

    it('should handle mixed frame types', () => {
      const source = [
        toDataFrame({
          name: 'valid frame',
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15] },
          ],
        }),
        toDataFrame({
          name: 'invalid frame',
          refId: 'B',
          fields: [
            { name: 'category', type: FieldType.string, values: ['A', 'B', 'C'] },
            { name: 'label', type: FieldType.string, values: ['X', 'Y', 'Z'] },
          ],
        }),
      ];

      const config: SmoothingTransformerOptions = {};

      const result = smoothingTransformer.transformer(config, ctx)(source);

      // should return 2 original frames + 1 smoothed frame (only the valid frame gets smoothed)
      expect(result).toHaveLength(3);

      // original frames first
      expect(result[0].name).toBe('valid frame');
      expect(result[1]).toEqual(source[1]);

      // smoothed frame after
      expect(result[2].name).toBe('Smoothed');
    });
  });

  describe('calculateMaxSourcePoints', () => {
    it('should return 0 for empty frames', () => {
      expect(calculateMaxSourcePoints([])).toBe(0);
    });

    it('should return 0 for frames without time fields', () => {
      const frames = [
        toDataFrame({
          fields: [
            { name: 'category', type: FieldType.string, values: ['A', 'B', 'C'] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15] },
          ],
        }),
      ];

      expect(calculateMaxSourcePoints(frames)).toBe(0);
    });

    it('should return 0 for frames without numeric fields', () => {
      const frames = [
        toDataFrame({
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'category', type: FieldType.string, values: ['A', 'B', 'C'] },
          ],
        }),
      ];

      expect(calculateMaxSourcePoints(frames)).toBe(0);
    });

    it('should count valid data points, filtering out null and NaN', () => {
      const frames = [
        toDataFrame({
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000, 4000, 5000] },
            { name: 'value', type: FieldType.number, values: [10, null, 15, NaN, 18] },
          ],
        }),
      ];

      // Only 3 valid points: 10, 15, 18
      expect(calculateMaxSourcePoints(frames)).toBe(3);
    });

    it('should return maximum across multiple numeric fields', () => {
      const frames = [
        toDataFrame({
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000, 4000, 5000] },
            { name: 'cpu', type: FieldType.number, values: [10, null, 15] }, // 2 valid points
            { name: 'memory', type: FieldType.number, values: [20, 25, 30, 35] }, // 4 valid points
          ],
        }),
      ];

      expect(calculateMaxSourcePoints(frames)).toBe(4);
    });

    it('should return maximum across multiple frames', () => {
      const frames = [
        toDataFrame({
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 15] },
          ],
        }),
        toDataFrame({
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000, 4000, 5000] },
            { name: 'metric', type: FieldType.number, values: [30, 40, 35, 45, 50] },
          ],
        }),
      ];

      expect(calculateMaxSourcePoints(frames)).toBe(5);
    });

    it('should handle frames with all valid points', () => {
      const frames = [
        toDataFrame({
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000, 4000] },
            { name: 'value', type: FieldType.number, values: [10, 20, 30, 40] },
          ],
        }),
      ];

      expect(calculateMaxSourcePoints(frames)).toBe(4);
    });

    it('should handle frames with all null values', () => {
      const frames = [
        toDataFrame({
          fields: [
            { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
            { name: 'value', type: FieldType.number, values: [null, null, null] },
          ],
        }),
      ];

      expect(calculateMaxSourcePoints(frames)).toBe(0);
    });
  });
});
@@ -0,0 +1,267 @@
import { map } from 'rxjs';

import {
  DataFrame,
  DataTransformerID,
  FieldType,
  SynchronousDataTransformerInfo,
  isTimeSeriesFrame,
  TransformationApplicabilityLevels,
} from '@grafana/data';
import { t } from '@grafana/i18n';

import { asapSmooth, DataPoint } from './asap';

export interface SmoothingTransformerOptions {
  resolution?: number;
}

export const DEFAULTS = {
  resolution: 100,
};

export const RESOLUTION_LIMITS = {
  min: 1,
  max: 1000,
};

const MAX_RESOLUTION_MULTIPLIER = 2;

// converts time and value arrays into valid DataPoints, filtering out null/NaN values
export const createDataPoints = (timeValues: number[], sourceField: Array<number | null | undefined>): DataPoint[] => {
  return timeValues
    .map((time, index) => ({
      x: time,
      y: sourceField[index],
    }))
    .filter((point): point is DataPoint => point.y != null && !isNaN(point.y));
};
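
// e.g. (illustrative) createDataPoints([1000, 2000, 3000], [10, null, NaN]) -> [{ x: 1000, y: 10 }]:
// null and NaN samples are dropped before smoothing.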

// calculates effective resolution capped at 2x source points
export const calculateEffectiveResolution = (resolution: number, sourcePointCount: number): number => {
  return Math.min(resolution, sourcePointCount * MAX_RESOLUTION_MULTIPLIER);
};
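
// e.g. calculateEffectiveResolution(100, 10) === 20: with only 10 source points, a requested
// resolution of 100 is capped at twice the available points.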

// calculates the maximum number of source points across all numeric fields in all frames
export const calculateMaxSourcePoints = (frames: DataFrame[]): number => {
  let maxSourcePoints = 0;

  for (const frame of frames) {
    const timeField = frame.fields.find((f) => f.type === FieldType.time);
    if (!timeField) {
      continue;
    }

    for (const field of frame.fields) {
      if (field.type === FieldType.number) {
        const sourcePoints = createDataPoints(timeField.values, field.values);
        if (sourcePoints.length > maxSourcePoints) {
          maxSourcePoints = sourcePoints.length;
        }
      }
    }
  }

  return maxSourcePoints;
};

// performs linear interpolation between two points
export const linearInterpolate = (leftPoint: DataPoint, rightPoint: DataPoint, targetTime: number): number => {
  // exact match
  if (leftPoint.x === targetTime) {
    return leftPoint.y;
  }
  if (rightPoint.x === targetTime) {
    return rightPoint.y;
  }

  // same point (shouldn't happen but handle gracefully)
  if (leftPoint.x === rightPoint.x) {
    return leftPoint.y;
  }

  // linear interpolation
  const ratio = (targetTime - leftPoint.x) / (rightPoint.x - leftPoint.x);
  return leftPoint.y + ratio * (rightPoint.y - leftPoint.y);
};
|
||||
|
||||
// finds the two points in smoothedData that bracket the targetTime
|
||||
export const findBracketingPoints = (
|
||||
smoothedData: DataPoint[],
|
||||
targetTime: number,
|
||||
lastIndex: number
|
||||
): { leftPoint: DataPoint; rightPoint: DataPoint; newIndex: number } => {
|
||||
// find the two points to interpolate between, starting from last known position
|
||||
// if target is before our current search position, reset to beginning
|
||||
let searchStart = Math.min(lastIndex, smoothedData.length - 2);
|
||||
if (targetTime < smoothedData[searchStart].x) {
|
||||
searchStart = 0;
|
||||
}
|
||||
|
||||
let leftPoint = smoothedData[searchStart];
|
||||
let rightPoint = smoothedData[searchStart + 1];
|
||||
let newIndex = searchStart;
|
||||
|
||||
for (let i = searchStart; i < smoothedData.length - 1; i++) {
|
||||
if (smoothedData[i].x <= targetTime && smoothedData[i + 1].x >= targetTime) {
|
||||
leftPoint = smoothedData[i];
|
||||
rightPoint = smoothedData[i + 1];
|
||||
newIndex = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return { leftPoint, rightPoint, newIndex };
|
||||
};
|
||||
|
||||
// interpolates smoothed data back to original time points
|
||||
export const interpolateToTimePoints = (smoothedData: DataPoint[], timeValues: number[]): number[] => {
|
||||
const firstPoint = smoothedData[0];
|
||||
const lastPoint = smoothedData[smoothedData.length - 1];
|
||||
|
||||
let lastIndex = 0;
|
||||
return timeValues.map((targetTime) => {
|
||||
// handle out of bounds, use edge values instead of null
|
||||
if (targetTime <= firstPoint.x) {
|
||||
return firstPoint.y;
|
||||
}
|
||||
if (targetTime >= lastPoint.x) {
|
||||
return lastPoint.y;
|
||||
}
|
||||
|
||||
const { leftPoint, rightPoint, newIndex } = findBracketingPoints(smoothedData, targetTime, lastIndex);
|
||||
lastIndex = newIndex;
|
||||
|
||||
return linearInterpolate(leftPoint, rightPoint, targetTime);
|
||||
});
|
||||
};
|
||||
|
||||
// smooths a time series by creating a smoothed curve and interpolating back to original time points
|
||||
export const interpolateFromSmoothedCurve = (
|
||||
sourceField: Array<number | null | undefined>,
|
||||
timeValues: number[],
|
||||
resolution: number
|
||||
): Array<number | null> | null => {
|
||||
const sourcePoints = createDataPoints(timeValues, sourceField);
|
||||
|
||||
// if no valid source points, return null to signal this field should not be smoothed
|
||||
if (sourcePoints.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// smooth the source field's data with effective resolution
|
||||
const effectiveFieldResolution = calculateEffectiveResolution(resolution, sourcePoints.length);
|
||||
const smoothedData = asapSmooth(sourcePoints, { resolution: effectiveFieldResolution });
|
||||
|
||||
if (smoothedData.length === 0) {
|
||||
return timeValues.map(() => null);
|
||||
}
|
||||
|
||||
// handle single point case - return the same value for all time points
|
||||
if (smoothedData.length === 1) {
|
||||
const singleValue = smoothedData[0].y;
|
||||
return timeValues.map(() => singleValue);
|
||||
}
|
||||
|
||||
// this prevents O(m×n) degradation if asapSmooth returns unsorted data
|
||||
smoothedData.sort((a, b) => a.x - b.x);
|
||||
|
||||
// interpolate smoothed data back to original time points
|
||||
return interpolateToTimePoints(smoothedData, timeValues);
|
||||
};
|
||||
|
||||
export const getSmoothingTransformer: () => SynchronousDataTransformerInfo<SmoothingTransformerOptions> = () => ({
|
||||
id: DataTransformerID.smoothing,
|
||||
name: t('transformers.smoothing.name', 'Smoothing'),
|
||||
description: t(
|
||||
'transformers.smoothing.description',
|
||||
'Reduce noise in time series data through adaptive downsampling.'
|
||||
),
|
||||
isApplicable: (data) => {
|
||||
for (const frame of data) {
|
||||
if (isTimeSeriesFrame(frame)) {
|
||||
return TransformationApplicabilityLevels.Applicable;
|
||||
}
|
||||
}
|
||||
|
||||
return TransformationApplicabilityLevels.NotApplicable;
|
||||
},
|
||||
isApplicableDescription: t(
|
||||
'transformers.smoothing.is-applicable-description',
|
||||
'The Smoothing transformation requires at least one time series frame to function. You currently have none.'
|
||||
),
|
||||
operator: (options, ctx) => {
|
||||
const transformer = getSmoothingTransformer().transformer(options, ctx);
|
||||
return (source) => source.pipe(map(transformer));
|
||||
},
|
||||
transformer: (options, ctx) => {
|
||||
return (frames: DataFrame[]) => {
|
||||
// clamp resolution to valid range to handle edge cases from API/plugins
|
||||
const rawResolution = options.resolution ?? DEFAULTS.resolution;
|
||||
const resolution = Math.max(RESOLUTION_LIMITS.min, Math.min(RESOLUTION_LIMITS.max, rawResolution));
|
||||
|
||||
if (frames.length === 0) {
|
||||
return frames;
|
||||
}
|
||||
|
||||
const smoothedFrames: DataFrame[] = [];
|
||||
|
||||
for (const frame of frames) {
|
||||
const timeField = frame.fields.find((f) => f.type === FieldType.time);
|
||||
if (!timeField) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// check if there's at least one numeric field with valid data
|
||||
const hasValidNumericField = frame.fields.some((f) => {
|
||||
if (f.type !== FieldType.number || f.values.length === 0) {
|
||||
return false;
|
||||
}
|
||||
return f.values.some((v) => v != null && !isNaN(v));
|
||||
});
|
||||
|
||||
if (!hasValidNumericField) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// create smoothed fields for all numeric fields
|
||||
const smoothedFields = [timeField]; // keep original time field
|
||||
let anyFieldSmoothed = false;
|
||||
|
||||
for (const field of frame.fields) {
|
||||
if (field.type === FieldType.number) {
|
||||
const smoothedValues = interpolateFromSmoothedCurve(field.values, timeField.values, resolution);
|
||||
|
||||
// if smoothing returned null (no valid data), skip this field
|
||||
if (smoothedValues === null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
anyFieldSmoothed = true;
|
||||
smoothedFields.push({
|
||||
...field,
|
||||
values: smoothedValues,
|
||||
state: undefined,
|
||||
});
|
||||
} else if (field.type !== FieldType.time) {
|
||||
// include other non-numeric, non-time fields (like labels)
|
||||
smoothedFields.push(field);
|
||||
}
|
||||
}
|
||||
|
||||
// only create a smoothed frame if at least one field was smoothed
|
||||
if (anyFieldSmoothed) {
|
||||
const smoothedFrame: DataFrame = {
|
||||
...frame,
|
||||
name: 'Smoothed',
|
||||
fields: smoothedFields,
|
||||
};
|
||||
smoothedFrames.push(smoothedFrame);
|
||||
}
|
||||
}
|
||||
|
||||
// return original frames followed by smoothed frames
|
||||
return [...frames, ...smoothedFrames];
|
||||
};
|
||||
},
|
||||
});
|
||||
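Since the exported helpers are pure functions, the null-filtering and resolution-capping behaviour can be exercised directly. A minimal sketch with illustrative values (not taken from the source):

```ts
import { calculateEffectiveResolution, interpolateFromSmoothedCurve } from './smoothing';

const time = [1000, 2000, 3000, 4000, 5000];
const values = [10, null, 15, 40, 18]; // 4 valid points after null filtering

// a requested resolution of 100 is capped at 2x the valid point count: min(100, 4 * 2) = 8
calculateEffectiveResolution(100, 4); // => 8

// returns one smoothed value per original timestamp, or null if the field has no valid points
const smoothed = interpolateFromSmoothedCurve(values, time, 100);
console.log(smoothed?.length); // => 5
```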
@@ -0,0 +1,93 @@
import { css } from '@emotion/css';
import { useMemo } from 'react';

import { DataTransformerID, TransformerRegistryItem, TransformerUIProps, TransformerCategory } from '@grafana/data';
import { t } from '@grafana/i18n';
import { InlineField, InlineFieldRow, Tooltip, useTheme2 } from '@grafana/ui';
import { NumberInput } from 'app/core/components/OptionsUI/NumberInput';

import { getTransformationContent } from '../docs/getTransformationContent';
import darkImage from '../images/dark/smoothing.svg';
import lightImage from '../images/light/smoothing.svg';

import {
  DEFAULTS,
  RESOLUTION_LIMITS,
  SmoothingTransformerOptions,
  getSmoothingTransformer,
  calculateEffectiveResolution,
  calculateMaxSourcePoints,
} from './smoothing';

export const SmoothingTransformerEditor = ({
  input,
  options,
  onChange,
}: TransformerUIProps<SmoothingTransformerOptions>) => {
  const theme = useTheme2();
  const resolution = options.resolution ?? DEFAULTS.resolution;

  const maxSourcePoints = useMemo(() => calculateMaxSourcePoints(input), [input]);
  const effectiveResolution = maxSourcePoints > 0 ? calculateEffectiveResolution(resolution, maxSourcePoints) : null;
  const showEffectiveResolution = effectiveResolution !== null && effectiveResolution < resolution;

  return (
    <InlineFieldRow>
      <InlineField
        label={t('transformers.smoothing.resolution.label', 'Resolution')}
        labelWidth={12}
        tooltip={t(
          'transformers.smoothing.resolution.tooltip',
          'Controls smoothing intensity. Lower values create more aggressive smoothing. Both original and smoothed data are displayed.'
        )}
      >
        <NumberInput
          value={resolution}
          onChange={(v) => onChange({ ...options, resolution: v })}
          min={RESOLUTION_LIMITS.min}
          max={RESOLUTION_LIMITS.max}
          width={20}
          suffix={
            showEffectiveResolution ? (
              <Tooltip
                content={t(
                  'transformers.smoothing.effective-resolution-tooltip',
                  'Resolution is limited to 2× the number of data points ({{points}}).',
                  { points: maxSourcePoints }
                )}
              >
                <span
                  className={css({
                    marginLeft: '8px',
                    color: theme.colors.text.secondary,
                    fontSize: theme.typography.bodySmall.fontSize,
                  })}
                >
                  {t('transformers.smoothing.effective-resolution', 'Effective: {{value}}', {
                    value: effectiveResolution,
                  })}
                </span>
              </Tooltip>
            ) : undefined
          }
        />
      </InlineField>
    </InlineFieldRow>
  );
};

export const getSmoothingTransformerRegistryItem: () => TransformerRegistryItem<SmoothingTransformerOptions> = () => {
  const smoothingTransformer = getSmoothingTransformer();
  return {
    id: DataTransformerID.smoothing,
    editor: SmoothingTransformerEditor,
    transformation: smoothingTransformer,
    name: smoothingTransformer.name,
    description: smoothingTransformer.description,
    categories: new Set([TransformerCategory.CalculateNewFields]),
    imageDark: darkImage,
    imageLight: lightImage,
    help: getTransformationContent(DataTransformerID.smoothing).helperDocs,
    tags: new Set(['ASAP', 'Autosmooth']),
  };
};
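A render-level check of the editor is straightforward because `TransformerUIProps` only requires `input`, `options`, and `onChange`. A minimal sketch, assuming `@testing-library/react` and the jest-dom matchers are available as they are elsewhere in the repo:

```tsx
// a hedged sketch of an editor test; exact NumberInput rendering details are an assumption
import { render, screen } from '@testing-library/react';

import { SmoothingTransformerEditor } from './smoothingEditor';

it('renders the resolution input with the default value', () => {
  render(<SmoothingTransformerEditor input={[]} options={{}} onChange={jest.fn()} />);

  // DEFAULTS.resolution is 100; with no input frames there is no "Effective" suffix
  expect(screen.getByDisplayValue('100')).toBeInTheDocument();
  expect(screen.queryByText(/Effective/)).not.toBeInTheDocument();
});
```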
@@ -1,4 +1,5 @@
import { TransformerRegistryItem } from '@grafana/data';
import { config } from '@grafana/runtime';

import { getFilterByValueTransformRegistryItem } from './FilterByValueTransformer/FilterByValueTransformerEditor';
import { getHeatmapTransformRegistryItem } from './calculateHeatmap/HeatmapTransformerEditor';
@@ -31,6 +32,7 @@ import { getPartitionByValuesTransformRegistryItem } from './partitionByValues/P
import { getPrepareTimeseriesTransformerRegistryItem } from './prepareTimeSeries/PrepareTimeSeriesEditor';
import { getRegressionTransformerRegistryItem } from './regression/regressionEditor';
import { getRowsToFieldsTransformRegistryItem } from './rowsToFields/RowsToFieldsTransformerEditor';
import { getSmoothingTransformerRegistryItem } from './smoothing/smoothingEditor';
import { getSpatialTransformRegistryItem } from './spatial/SpatialTransformerEditor';
import { getTimeSeriesTableTransformRegistryItem } from './timeSeriesTable/TimeSeriesTableTransformEditor';

@@ -66,6 +68,7 @@ export const getStandardTransformers = (): TransformerRegistryItem[] => {
    getPartitionByValuesTransformRegistryItem(),
    getFormatStringTransformerRegistryItem(),
    getGroupToNestedTableTransformRegistryItem(),
    ...(config.featureToggles.smoothingTransformation ? [getSmoothingTransformerRegistryItem()] : []),
    getFormatTimeTransformerRegistryItem(),
    getTimeSeriesTableTransformRegistryItem(),
    getTransposeTransformerRegistryItem(),
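Because the registry item is spread in only when `config.featureToggles.smoothingTransformation` is set, the transformation stays hidden by default. For local testing the toggle would typically be switched on through Grafana's standard `[feature_toggles]` config section (shown here as an illustration of that mechanism, not as part of this diff):

```ini
# conf/custom.ini — enable the experimental smoothing transformation
[feature_toggles]
enable = smoothingTransformation
```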
@@ -14399,6 +14399,17 @@
        "series-to-rows": "Series to rows"
      }
    },
    "smoothing": {
      "description": "Reduce noise in time series data through adaptive downsampling.",
      "effective-resolution": "Effective: {{value}}",
      "effective-resolution-tooltip": "Resolution is limited to 2× the number of data points ({{points}}).",
      "is-applicable-description": "The Smoothing transformation requires at least one time series frame to function. You currently have none.",
      "name": "Smoothing",
      "resolution": {
        "label": "Resolution",
        "tooltip": "Controls smoothing intensity. Lower values create more aggressive smoothing. Both original and smoothed data are displayed."
      }
    },
    "sort-by-transformer-editor": {
      "description": {
        "sort-fields": "Sort fields in a frame."
@@ -16497,6 +16497,13 @@ __metadata:
  languageName: node
  linkType: hard

"downsample@npm:1.4.0":
  version: 1.4.0
  resolution: "downsample@npm:1.4.0"
  checksum: 10/ad0ab937e368546b577b564b13d7f39cd85a92bf29d56562aaa6ed10bac19e91ee75ab58f38050a9e8bf601c1abcfda942541880a84c89ba78d1775a229636d1
  languageName: node
  linkType: hard

"downshift@npm:^9.0.6":
  version: 9.0.10
  resolution: "downshift@npm:9.0.10"
@@ -19629,6 +19636,7 @@ __metadata:
    date-fns: "npm:4.1.0"
    debounce-promise: "npm:3.1.2"
    diff: "npm:^8.0.0"
    downsample: "npm:1.4.0"
    enquirer: "npm:^2.4.1"
    esbuild: "npm:0.25.8"
    esbuild-loader: "npm:4.3.0"