Prometheus: Make sure "Min Step" has precedence (#115941)
* set minStep value as final step value when set explicitly. * enhance it with tests * improve function readability * a bit more improvement for readability
This commit is contained in:
602
pkg/promlib/models/interval_test.go
Normal file
602
pkg/promlib/models/interval_test.go
Normal file
@@ -0,0 +1,602 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/stretchr/testify/require"
|
||||
"go.opentelemetry.io/otel"
|
||||
|
||||
"github.com/grafana/grafana/pkg/promlib/intervalv2"
|
||||
)
|
||||
|
||||
var (
|
||||
testNow = time.Now()
|
||||
testIntervalCalculator = intervalv2.NewCalculator()
|
||||
testTracer = otel.Tracer("test/interval")
|
||||
)
|
||||
|
||||
func TestCalculatePrometheusInterval(t *testing.T) {
|
||||
_, span := testTracer.Start(context.Background(), "test")
|
||||
defer span.End()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
queryInterval string
|
||||
dsScrapeInterval string
|
||||
intervalMs int64
|
||||
intervalFactor int64
|
||||
query backend.DataQuery
|
||||
want time.Duration
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "min step 2m with 300000 intervalMs",
|
||||
queryInterval: "2m",
|
||||
dsScrapeInterval: "",
|
||||
intervalMs: 300000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 5 * time.Minute,
|
||||
MaxDataPoints: 761,
|
||||
},
|
||||
want: 2 * time.Minute,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "min step 2m with 900000 intervalMs",
|
||||
queryInterval: "2m",
|
||||
dsScrapeInterval: "",
|
||||
intervalMs: 900000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 15 * time.Minute,
|
||||
MaxDataPoints: 175,
|
||||
},
|
||||
want: 2 * time.Minute,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with step parameter",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(12 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 30 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "without step parameter",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(1 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 15 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with high intervalFactor",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 10,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 20 * time.Minute,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with low intervalFactor",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 2 * time.Minute,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with specified scrape-interval in data source",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "240s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 4 * time.Minute,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with zero intervalFactor defaults to 1",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 0,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(1 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 15 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with $__interval variable",
|
||||
queryInterval: "$__interval",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 60000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 120 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with ${__interval} variable",
|
||||
queryInterval: "${__interval}",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 60000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 120 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with ${__interval} variable and explicit interval",
|
||||
queryInterval: "1m",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 60000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 1 * time.Minute,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with $__rate_interval variable",
|
||||
queryInterval: "$__rate_interval",
|
||||
dsScrapeInterval: "30s",
|
||||
intervalMs: 100000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(2 * 24 * time.Hour),
|
||||
},
|
||||
Interval: 100 * time.Second,
|
||||
MaxDataPoints: 12384,
|
||||
},
|
||||
want: 130 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with ${__rate_interval} variable",
|
||||
queryInterval: "${__rate_interval}",
|
||||
dsScrapeInterval: "30s",
|
||||
intervalMs: 100000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(2 * 24 * time.Hour),
|
||||
},
|
||||
Interval: 100 * time.Second,
|
||||
MaxDataPoints: 12384,
|
||||
},
|
||||
want: 130 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "intervalMs 100s, minStep override 150s and scrape interval 30s",
|
||||
queryInterval: "150s",
|
||||
dsScrapeInterval: "30s",
|
||||
intervalMs: 100000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(2 * 24 * time.Hour),
|
||||
},
|
||||
Interval: 100 * time.Second,
|
||||
MaxDataPoints: 12384,
|
||||
},
|
||||
want: 150 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "intervalMs 120s, minStep override 150s and ds scrape interval 30s",
|
||||
queryInterval: "150s",
|
||||
dsScrapeInterval: "30s",
|
||||
intervalMs: 120000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(2 * 24 * time.Hour),
|
||||
},
|
||||
Interval: 120 * time.Second,
|
||||
MaxDataPoints: 12384,
|
||||
},
|
||||
want: 150 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "intervalMs 120s, minStep auto (interval not overridden) and ds scrape interval 30s",
|
||||
queryInterval: "120s",
|
||||
dsScrapeInterval: "30s",
|
||||
intervalMs: 120000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(2 * 24 * time.Hour),
|
||||
},
|
||||
Interval: 120 * time.Second,
|
||||
MaxDataPoints: 12384,
|
||||
},
|
||||
want: 120 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "interval and minStep are automatically calculated and ds scrape interval 30s and time range 1 hour",
|
||||
queryInterval: "30s",
|
||||
dsScrapeInterval: "30s",
|
||||
intervalMs: 30000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(1 * time.Hour),
|
||||
},
|
||||
Interval: 30 * time.Second,
|
||||
MaxDataPoints: 12384,
|
||||
},
|
||||
want: 30 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "minStep is $__rate_interval and ds scrape interval 30s and time range 1 hour",
|
||||
queryInterval: "$__rate_interval",
|
||||
dsScrapeInterval: "30s",
|
||||
intervalMs: 30000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(1 * time.Hour),
|
||||
},
|
||||
Interval: 30 * time.Second,
|
||||
MaxDataPoints: 12384,
|
||||
},
|
||||
want: 2 * time.Minute,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "minStep is $__rate_interval and ds scrape interval 30s and time range 2 days",
|
||||
queryInterval: "$__rate_interval",
|
||||
dsScrapeInterval: "30s",
|
||||
intervalMs: 120000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(2 * 24 * time.Hour),
|
||||
},
|
||||
Interval: 120 * time.Second,
|
||||
MaxDataPoints: 12384,
|
||||
},
|
||||
want: 150 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "minStep is $__interval and ds scrape interval 15s and time range 2 days",
|
||||
queryInterval: "$__interval",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 120000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(2 * 24 * time.Hour),
|
||||
},
|
||||
Interval: 120 * time.Second,
|
||||
MaxDataPoints: 12384,
|
||||
},
|
||||
want: 120 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with empty dsScrapeInterval defaults to 15s",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(1 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 15 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with very short time range",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(1 * time.Minute),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 15 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with very long time range",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(30 * 24 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 30 * time.Minute,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with manual interval override",
|
||||
queryInterval: "5m",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 5 * time.Minute,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "minStep is auto and ds scrape interval 30s and time range 1 hour",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "30s",
|
||||
intervalMs: 30000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(1 * time.Hour),
|
||||
},
|
||||
Interval: 30 * time.Second,
|
||||
MaxDataPoints: 1613,
|
||||
},
|
||||
want: 30 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "minStep is auto and ds scrape interval 15s and time range 5 minutes",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 15000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(5 * time.Minute),
|
||||
},
|
||||
Interval: 15 * time.Second,
|
||||
MaxDataPoints: 1055,
|
||||
},
|
||||
want: 15 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
// Additional test cases for better coverage
|
||||
{
|
||||
name: "with $__interval_ms variable",
|
||||
queryInterval: "$__interval_ms",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 60000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 120 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with ${__interval_ms} variable",
|
||||
queryInterval: "${__interval_ms}",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 60000,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: 120 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with MaxDataPoints zero",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(1 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
MaxDataPoints: 0,
|
||||
},
|
||||
want: 15 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with negative intervalFactor",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: -5,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: -10 * time.Minute,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with invalid interval string that fails parsing",
|
||||
queryInterval: "invalid-interval",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(48 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
},
|
||||
want: time.Duration(0),
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "with very small MaxDataPoints",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(1 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
MaxDataPoints: 10,
|
||||
},
|
||||
want: 5 * time.Minute,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "when safeInterval is larger than calculatedInterval",
|
||||
queryInterval: "",
|
||||
dsScrapeInterval: "15s",
|
||||
intervalMs: 0,
|
||||
intervalFactor: 1,
|
||||
query: backend.DataQuery{
|
||||
TimeRange: backend.TimeRange{
|
||||
From: testNow,
|
||||
To: testNow.Add(1 * time.Hour),
|
||||
},
|
||||
Interval: 1 * time.Minute,
|
||||
MaxDataPoints: 10000,
|
||||
},
|
||||
want: 15 * time.Second,
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, err := calculatePrometheusInterval(
|
||||
tt.queryInterval,
|
||||
tt.dsScrapeInterval,
|
||||
tt.intervalMs,
|
||||
tt.intervalFactor,
|
||||
tt.query,
|
||||
testIntervalCalculator,
|
||||
)
|
||||
|
||||
if tt.wantErr {
|
||||
require.Error(t, err)
|
||||
return
|
||||
}
|
||||
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, tt.want, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -92,7 +92,6 @@ const (
|
||||
)
|
||||
|
||||
// Internal interval and range variables with {} syntax
|
||||
// Repetitive code, we should have functionality to unify these
|
||||
const (
|
||||
varIntervalAlt = "${__interval}"
|
||||
varIntervalMsAlt = "${__interval_ms}"
|
||||
@@ -112,8 +111,16 @@ const (
|
||||
UnknownQueryType TimeSeriesQueryType = "unknown"
|
||||
)
|
||||
|
||||
// safeResolution is the maximum number of data points to prevent excessive resolution.
|
||||
// This ensures queries don't exceed reasonable data point limits, improving performance
|
||||
// and preventing potential memory issues. The value of 11000 provides a good balance
|
||||
// between resolution and performance for most use cases.
|
||||
var safeResolution = 11000
|
||||
|
||||
// rateIntervalMultiplier is the minimum multiplier for rate interval calculation.
|
||||
// Rate intervals should be at least 4x the scrape interval to ensure accurate rate calculations.
|
||||
const rateIntervalMultiplier = 4
|
||||
|
||||
// QueryModel includes both the common and specific values
|
||||
// NOTE: this struct may have issues when decoding JSON that requires the special handling
|
||||
// registered in https://github.com/grafana/grafana-plugin-sdk-go/blob/v0.228.0/experimental/apis/data/v0alpha1/query.go#L298
|
||||
@@ -154,7 +161,7 @@ type Query struct {
|
||||
// may be either a string or DataSourceRef
|
||||
type internalQueryModel struct {
|
||||
PrometheusQueryProperties `json:",inline"`
|
||||
//sdkapi.CommonQueryProperties `json:",inline"`
|
||||
// sdkapi.CommonQueryProperties `json:",inline"`
|
||||
IntervalMS float64 `json:"intervalMs,omitempty"`
|
||||
|
||||
// The following properties may be part of the request payload, however they are not saved in panel JSON
|
||||
@@ -272,44 +279,121 @@ func (query *Query) TimeRange() TimeRange {
|
||||
}
|
||||
}
|
||||
|
||||
// isRateIntervalVariable checks if the interval string is a rate interval variable
|
||||
// ($__rate_interval, ${__rate_interval}, $__rate_interval_ms, or ${__rate_interval_ms})
|
||||
func isRateIntervalVariable(interval string) bool {
|
||||
return interval == varRateInterval ||
|
||||
interval == varRateIntervalAlt ||
|
||||
interval == varRateIntervalMs ||
|
||||
interval == varRateIntervalMsAlt
|
||||
}
|
||||
|
||||
// replaceVariable replaces both $__variable and ${__variable} formats in the expression
|
||||
func replaceVariable(expr, dollarFormat, altFormat, replacement string) string {
|
||||
expr = strings.ReplaceAll(expr, dollarFormat, replacement)
|
||||
expr = strings.ReplaceAll(expr, altFormat, replacement)
|
||||
return expr
|
||||
}
|
||||
|
||||
// isManualIntervalOverride checks if the interval is a manually specified non-variable value
|
||||
// that should override the calculated interval
|
||||
func isManualIntervalOverride(interval string) bool {
|
||||
return interval != "" &&
|
||||
interval != varInterval &&
|
||||
interval != varIntervalAlt &&
|
||||
interval != varIntervalMs &&
|
||||
interval != varIntervalMsAlt
|
||||
}
|
||||
|
||||
// maxDuration returns the maximum of two durations
|
||||
func maxDuration(a, b time.Duration) time.Duration {
|
||||
if a > b {
|
||||
return a
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// normalizeIntervalFactor ensures intervalFactor is at least 1
|
||||
func normalizeIntervalFactor(factor int64) int64 {
|
||||
if factor == 0 {
|
||||
return 1
|
||||
}
|
||||
return factor
|
||||
}
|
||||
|
||||
// calculatePrometheusInterval calculates the optimal step interval for a Prometheus query.
|
||||
//
|
||||
// The function determines the query step interval by considering multiple factors:
|
||||
// - The minimum step specified in the query (queryInterval)
|
||||
// - The data source scrape interval (dsScrapeInterval)
|
||||
// - The requested interval in milliseconds (intervalMs)
|
||||
// - The time range and maximum data points from the query
|
||||
// - The interval factor multiplier
|
||||
//
|
||||
// Special handling:
|
||||
// - Variable intervals ($__interval, $__rate_interval, etc.) are replaced with calculated values
|
||||
// - Rate interval variables ($__rate_interval, ${__rate_interval}) use calculateRateInterval for proper rate() function support
|
||||
// - Manual interval overrides (non-variable strings) take precedence over calculated values
|
||||
// - The final interval ensures safe resolution limits are not exceeded
|
||||
//
|
||||
// Parameters:
|
||||
// - queryInterval: The minimum step interval string (may contain variables like $__interval or $__rate_interval)
|
||||
// - dsScrapeInterval: The data source scrape interval (e.g., "15s", "30s")
|
||||
// - intervalMs: The requested interval in milliseconds
|
||||
// - intervalFactor: Multiplier for the calculated interval (defaults to 1 if 0)
|
||||
// - query: The backend data query containing time range and max data points
|
||||
// - intervalCalculator: Calculator for determining optimal intervals
|
||||
//
|
||||
// Returns:
|
||||
// - The calculated step interval as a time.Duration
|
||||
// - An error if the interval cannot be calculated (e.g., invalid interval string)
|
||||
func calculatePrometheusInterval(
|
||||
queryInterval, dsScrapeInterval string,
|
||||
intervalMs, intervalFactor int64,
|
||||
query backend.DataQuery,
|
||||
intervalCalculator intervalv2.Calculator,
|
||||
) (time.Duration, error) {
|
||||
// we need to compare the original query model after it is overwritten below to variables so that we can
|
||||
// calculate the rateInterval if it is equal to $__rate_interval or ${__rate_interval}
|
||||
// Preserve the original interval for later comparison, as it may be modified below
|
||||
originalQueryInterval := queryInterval
|
||||
|
||||
// If we are using variable for interval/step, we will replace it with calculated interval
|
||||
// If we are using a variable for minStep, replace it with empty string
|
||||
// so that the interval calculation proceeds with the default logic
|
||||
if isVariableInterval(queryInterval) {
|
||||
queryInterval = ""
|
||||
}
|
||||
|
||||
// Get the minimum interval from various sources (dsScrapeInterval, queryInterval, intervalMs)
|
||||
minInterval, err := gtime.GetIntervalFrom(dsScrapeInterval, queryInterval, intervalMs, 15*time.Second)
|
||||
if err != nil {
|
||||
return time.Duration(0), err
|
||||
}
|
||||
|
||||
// Calculate the optimal interval based on time range and max data points
|
||||
calculatedInterval := intervalCalculator.Calculate(query.TimeRange, minInterval, query.MaxDataPoints)
|
||||
// Calculate the safe interval to prevent too many data points
|
||||
safeInterval := intervalCalculator.CalculateSafeInterval(query.TimeRange, int64(safeResolution))
|
||||
|
||||
adjustedInterval := safeInterval.Value
|
||||
if calculatedInterval.Value > safeInterval.Value {
|
||||
adjustedInterval = calculatedInterval.Value
|
||||
}
|
||||
// Use the larger of calculated or safe interval to ensure we don't exceed resolution limits
|
||||
adjustedInterval := maxDuration(calculatedInterval.Value, safeInterval.Value)
|
||||
|
||||
// here is where we compare for $__rate_interval or ${__rate_interval}
|
||||
if originalQueryInterval == varRateInterval || originalQueryInterval == varRateIntervalAlt {
|
||||
// Handle rate interval variables: these require special calculation
|
||||
if isRateIntervalVariable(originalQueryInterval) {
|
||||
// Rate interval is final and is not affected by resolution
|
||||
return calculateRateInterval(adjustedInterval, dsScrapeInterval), nil
|
||||
} else {
|
||||
queryIntervalFactor := intervalFactor
|
||||
if queryIntervalFactor == 0 {
|
||||
queryIntervalFactor = 1
|
||||
}
|
||||
return time.Duration(int64(adjustedInterval) * queryIntervalFactor), nil
|
||||
}
|
||||
|
||||
// Handle manual interval override: if user specified a non-variable interval,
|
||||
// it takes precedence over calculated values
|
||||
if isManualIntervalOverride(originalQueryInterval) {
|
||||
if parsedInterval, err := gtime.ParseIntervalStringToTimeDuration(originalQueryInterval); err == nil {
|
||||
return parsedInterval, nil
|
||||
}
|
||||
// If parsing fails, fall through to calculated interval with factor
|
||||
}
|
||||
|
||||
// Apply interval factor to the adjusted interval
|
||||
normalizedFactor := normalizeIntervalFactor(intervalFactor)
|
||||
return time.Duration(int64(adjustedInterval) * normalizedFactor), nil
|
||||
}
|
||||
|
||||
// calculateRateInterval calculates the $__rate_interval value
|
||||
@@ -331,7 +415,8 @@ func calculateRateInterval(
|
||||
return time.Duration(0)
|
||||
}
|
||||
|
||||
rateInterval := time.Duration(int64(math.Max(float64(queryInterval+scrapeIntervalDuration), float64(4)*float64(scrapeIntervalDuration))))
|
||||
minRateInterval := rateIntervalMultiplier * scrapeIntervalDuration
|
||||
rateInterval := maxDuration(queryInterval+scrapeIntervalDuration, minRateInterval)
|
||||
return rateInterval
|
||||
}
|
||||
|
||||
@@ -366,34 +451,33 @@ func InterpolateVariables(
|
||||
rateInterval = calculateRateInterval(queryInterval, requestedMinStep)
|
||||
}
|
||||
|
||||
expr = strings.ReplaceAll(expr, varIntervalMs, strconv.FormatInt(int64(calculatedStep/time.Millisecond), 10))
|
||||
expr = strings.ReplaceAll(expr, varInterval, gtime.FormatInterval(calculatedStep))
|
||||
expr = strings.ReplaceAll(expr, varRangeMs, strconv.FormatInt(rangeMs, 10))
|
||||
expr = strings.ReplaceAll(expr, varRangeS, strconv.FormatInt(rangeSRounded, 10))
|
||||
expr = strings.ReplaceAll(expr, varRange, strconv.FormatInt(rangeSRounded, 10)+"s")
|
||||
expr = strings.ReplaceAll(expr, varRateIntervalMs, strconv.FormatInt(int64(rateInterval/time.Millisecond), 10))
|
||||
expr = strings.ReplaceAll(expr, varRateInterval, rateInterval.String())
|
||||
// Replace interval variables (both $__var and ${__var} formats)
|
||||
expr = replaceVariable(expr, varIntervalMs, varIntervalMsAlt, strconv.FormatInt(int64(calculatedStep/time.Millisecond), 10))
|
||||
expr = replaceVariable(expr, varInterval, varIntervalAlt, gtime.FormatInterval(calculatedStep))
|
||||
|
||||
// Replace range variables (both $__var and ${__var} formats)
|
||||
expr = replaceVariable(expr, varRangeMs, varRangeMsAlt, strconv.FormatInt(rangeMs, 10))
|
||||
expr = replaceVariable(expr, varRangeS, varRangeSAlt, strconv.FormatInt(rangeSRounded, 10))
|
||||
expr = replaceVariable(expr, varRange, varRangeAlt, strconv.FormatInt(rangeSRounded, 10)+"s")
|
||||
|
||||
// Replace rate interval variables (both $__var and ${__var} formats)
|
||||
expr = replaceVariable(expr, varRateIntervalMs, varRateIntervalMsAlt, strconv.FormatInt(int64(rateInterval/time.Millisecond), 10))
|
||||
expr = replaceVariable(expr, varRateInterval, varRateIntervalAlt, rateInterval.String())
|
||||
|
||||
// Repetitive code, we should have functionality to unify these
|
||||
expr = strings.ReplaceAll(expr, varIntervalMsAlt, strconv.FormatInt(int64(calculatedStep/time.Millisecond), 10))
|
||||
expr = strings.ReplaceAll(expr, varIntervalAlt, gtime.FormatInterval(calculatedStep))
|
||||
expr = strings.ReplaceAll(expr, varRangeMsAlt, strconv.FormatInt(rangeMs, 10))
|
||||
expr = strings.ReplaceAll(expr, varRangeSAlt, strconv.FormatInt(rangeSRounded, 10))
|
||||
expr = strings.ReplaceAll(expr, varRangeAlt, strconv.FormatInt(rangeSRounded, 10)+"s")
|
||||
expr = strings.ReplaceAll(expr, varRateIntervalMsAlt, strconv.FormatInt(int64(rateInterval/time.Millisecond), 10))
|
||||
expr = strings.ReplaceAll(expr, varRateIntervalAlt, rateInterval.String())
|
||||
return expr
|
||||
}
|
||||
|
||||
// isVariableInterval checks if the interval string is a variable interval
|
||||
// (any of $__interval, ${__interval}, $__interval_ms, ${__interval_ms}, $__rate_interval, ${__rate_interval}, etc.)
|
||||
func isVariableInterval(interval string) bool {
|
||||
if interval == varInterval || interval == varIntervalMs || interval == varRateInterval || interval == varRateIntervalMs {
|
||||
return true
|
||||
}
|
||||
// Repetitive code, we should have functionality to unify these
|
||||
if interval == varIntervalAlt || interval == varIntervalMsAlt || interval == varRateIntervalAlt || interval == varRateIntervalMsAlt {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
return interval == varInterval ||
|
||||
interval == varIntervalAlt ||
|
||||
interval == varIntervalMs ||
|
||||
interval == varIntervalMsAlt ||
|
||||
interval == varRateInterval ||
|
||||
interval == varRateIntervalAlt ||
|
||||
interval == varRateIntervalMs ||
|
||||
interval == varRateIntervalMsAlt
|
||||
}
|
||||
|
||||
// AlignTimeRange aligns query range to step and handles the time offset.
|
||||
@@ -410,7 +494,7 @@ func AlignTimeRange(t time.Time, step time.Duration, offset int64) time.Time {
|
||||
//go:embed query.types.json
|
||||
var f embed.FS
|
||||
|
||||
// QueryTypeDefinitionsJSON returns the query type definitions
|
||||
// QueryTypeDefinitionListJSON returns the query type definitions
|
||||
func QueryTypeDefinitionListJSON() (json.RawMessage, error) {
|
||||
return f.ReadFile("query.types.json")
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@ package models_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
@@ -14,6 +13,7 @@ import (
|
||||
"go.opentelemetry.io/otel"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
|
||||
|
||||
"github.com/grafana/grafana/pkg/promlib/intervalv2"
|
||||
"github.com/grafana/grafana/pkg/promlib/models"
|
||||
)
|
||||
@@ -50,95 +50,6 @@ func TestParse(t *testing.T) {
|
||||
require.Equal(t, false, res.ExemplarQuery)
|
||||
})
|
||||
|
||||
t.Run("parsing query model with step", func(t *testing.T) {
|
||||
timeRange := backend.TimeRange{
|
||||
From: now,
|
||||
To: now.Add(12 * time.Hour),
|
||||
}
|
||||
|
||||
q := queryContext(`{
|
||||
"expr": "go_goroutines",
|
||||
"format": "time_series",
|
||||
"refId": "A"
|
||||
}`, timeRange, time.Duration(1)*time.Minute)
|
||||
|
||||
res, err := models.Parse(context.Background(), log.New(), span, q, "15s", intervalCalculator, false)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, time.Second*30, res.Step)
|
||||
})
|
||||
|
||||
t.Run("parsing query model without step parameter", func(t *testing.T) {
|
||||
timeRange := backend.TimeRange{
|
||||
From: now,
|
||||
To: now.Add(1 * time.Hour),
|
||||
}
|
||||
|
||||
q := queryContext(`{
|
||||
"expr": "go_goroutines",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"refId": "A"
|
||||
}`, timeRange, time.Duration(1)*time.Minute)
|
||||
|
||||
res, err := models.Parse(context.Background(), log.New(), span, q, "15s", intervalCalculator, false)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, time.Second*15, res.Step)
|
||||
})
|
||||
|
||||
t.Run("parsing query model with high intervalFactor", func(t *testing.T) {
|
||||
timeRange := backend.TimeRange{
|
||||
From: now,
|
||||
To: now.Add(48 * time.Hour),
|
||||
}
|
||||
|
||||
q := queryContext(`{
|
||||
"expr": "go_goroutines",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 10,
|
||||
"refId": "A"
|
||||
}`, timeRange, time.Duration(1)*time.Minute)
|
||||
|
||||
res, err := models.Parse(context.Background(), log.New(), span, q, "15s", intervalCalculator, false)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, time.Minute*20, res.Step)
|
||||
})
|
||||
|
||||
t.Run("parsing query model with low intervalFactor", func(t *testing.T) {
|
||||
timeRange := backend.TimeRange{
|
||||
From: now,
|
||||
To: now.Add(48 * time.Hour),
|
||||
}
|
||||
|
||||
q := queryContext(`{
|
||||
"expr": "go_goroutines",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"refId": "A"
|
||||
}`, timeRange, time.Duration(1)*time.Minute)
|
||||
|
||||
res, err := models.Parse(context.Background(), log.New(), span, q, "15s", intervalCalculator, false)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, time.Minute*2, res.Step)
|
||||
})
|
||||
|
||||
t.Run("parsing query model specified scrape-interval in the data source", func(t *testing.T) {
|
||||
timeRange := backend.TimeRange{
|
||||
From: now,
|
||||
To: now.Add(48 * time.Hour),
|
||||
}
|
||||
|
||||
q := queryContext(`{
|
||||
"expr": "go_goroutines",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"refId": "A"
|
||||
}`, timeRange, time.Duration(1)*time.Minute)
|
||||
|
||||
res, err := models.Parse(context.Background(), log.New(), span, q, "240s", intervalCalculator, false)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, time.Minute*4, res.Step)
|
||||
})
|
||||
|
||||
t.Run("parsing query model with $__interval variable", func(t *testing.T) {
|
||||
timeRange := backend.TimeRange{
|
||||
From: now,
|
||||
@@ -176,7 +87,7 @@ func TestParse(t *testing.T) {
|
||||
|
||||
res, err := models.Parse(context.Background(), log.New(), span, q, "15s", intervalCalculator, false)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, "rate(ALERTS{job=\"test\" [2m]})", res.Expr)
|
||||
require.Equal(t, "rate(ALERTS{job=\"test\" [1m]})", res.Expr)
|
||||
})
|
||||
|
||||
t.Run("parsing query model with $__interval_ms variable", func(t *testing.T) {
|
||||
@@ -533,232 +444,6 @@ func TestParse(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
// TestRateInterval verifies how models.Parse resolves the query step and the
// $__rate_interval variable from the panel "Min Step" (interval), the
// panel-calculated intervalMs, and the data source scrape interval. In
// particular, an explicitly set min step must take precedence over the
// calculated interval.
func TestRateInterval(t *testing.T) {
	// models.Parse requires a span; its contents are irrelevant to the test.
	_, span := tracer.Start(context.Background(), "operation")
	defer span.End()
	// args holds the inputs for one Parse call. A nil timeRange makes
	// mockQuery fall back to its one-hour default window.
	type args struct {
		expr             string
		interval         string
		intervalMs       int64
		dsScrapeInterval string
		timeRange        *backend.TimeRange
	}
	tests := []struct {
		name string
		args args
		want *models.Query
	}{
		{
			// Explicit min step (150s) larger than intervalMs (100s): the min
			// step must win, both for Step and the $__rate_interval expansion.
			name: "intervalMs 100s, minStep override 150s and scrape interval 30s",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "150s",
				intervalMs:       100000,
				dsScrapeInterval: "30s",
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[10m0s])",
				Step: time.Second * 150,
			},
		},
		{
			name: "intervalMs 120s, minStep override 150s and ds scrape interval 30s",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "150s",
				intervalMs:       120000,
				dsScrapeInterval: "30s",
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[10m0s])",
				Step: time.Second * 150,
			},
		},
		{
			// interval equals intervalMs, i.e. the min step was not overridden
			// by the user, so the calculated interval is used as-is.
			name: "intervalMs 120s, minStep auto (interval not overridden) and ds scrape interval 30s",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "120s",
				intervalMs:       120000,
				dsScrapeInterval: "30s",
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[8m0s])",
				Step: time.Second * 120,
			},
		},
		{
			name: "interval and minStep are automatically calculated and ds scrape interval 30s and time range 1 hour",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "30s",
				intervalMs:       30000,
				dsScrapeInterval: "30s",
				timeRange: &backend.TimeRange{
					From: now,
					To:   now.Add(1 * time.Hour),
				},
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[2m0s])",
				Step: time.Second * 30,
			},
		},
		{
			// Min step set to the $__rate_interval variable itself: the
			// resolved rate interval also becomes the step.
			name: "minStep is $__rate_interval and ds scrape interval 30s and time range 1 hour",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "$__rate_interval",
				intervalMs:       30000,
				dsScrapeInterval: "30s",
				timeRange: &backend.TimeRange{
					From: now,
					To:   now.Add(1 * time.Hour),
				},
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[2m0s])",
				Step: time.Minute * 2,
			},
		},
		{
			name: "minStep is $__rate_interval and ds scrape interval 30s and time range 2 days",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "$__rate_interval",
				intervalMs:       120000,
				dsScrapeInterval: "30s",
				timeRange: &backend.TimeRange{
					From: now,
					To:   now.Add(2 * 24 * time.Hour),
				},
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[2m30s])",
				Step: time.Second * 150,
			},
		},
		{
			// NOTE(review): the name says "$__rate_interval" but the interval
			// used here is "$__interval" — confirm the name matches the intent.
			name: "minStep is $__rate_interval and ds scrape interval 15s and time range 2 days",
			args: args{
				expr:             "rate(rpc_durations_seconds_count[$__rate_interval])",
				interval:         "$__interval",
				intervalMs:       120000,
				dsScrapeInterval: "15s",
				timeRange: &backend.TimeRange{
					From: now,
					To:   now.Add(2 * 24 * time.Hour),
				},
			},
			want: &models.Query{
				Expr: "rate(rpc_durations_seconds_count[8m0s])",
				Step: time.Second * 120,
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			q := mockQuery(tt.args.expr, tt.args.interval, tt.args.intervalMs, tt.args.timeRange)
			q.MaxDataPoints = 12384
			res, err := models.Parse(context.Background(), log.New(), span, q, tt.args.dsScrapeInterval, intervalCalculator, false)
			require.NoError(t, err)
			require.Equal(t, tt.want.Expr, res.Expr)
			require.Equal(t, tt.want.Step, res.Step)
		})
	}

	// The following subtests use a hand-built DataQuery (rather than
	// mockQuery) to mirror the exact JSON a dashboard panel sends.
	t.Run("minStep is auto and ds scrape interval 30s and time range 1 hour", func(t *testing.T) {
		query := backend.DataQuery{
			RefID:         "G",
			QueryType:     "",
			MaxDataPoints: 1613,
			Interval:      30 * time.Second,
			TimeRange: backend.TimeRange{
				From: now,
				To:   now.Add(1 * time.Hour),
			},
			JSON: []byte(`{
				"datasource":{"type":"prometheus","uid":"zxS5e5W4k"},
				"datasourceId":38,
				"editorMode":"code",
				"exemplar":false,
				"expr":"sum(rate(process_cpu_seconds_total[$__rate_interval]))",
				"instant":false,
				"interval":"",
				"intervalMs":30000,
				"key":"Q-f96b6729-c47a-4ea8-8f71-a79774cf9bd5-0",
				"legendFormat":"__auto",
				"maxDataPoints":1613,
				"range":true,
				"refId":"G",
				"requestId":"1G",
				"utcOffsetSec":3600
			}`),
		}
		res, err := models.Parse(context.Background(), log.New(), span, query, "30s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "sum(rate(process_cpu_seconds_total[2m0s]))", res.Expr)
		require.Equal(t, 30*time.Second, res.Step)
	})

	t.Run("minStep is auto and ds scrape interval 15s and time range 5 minutes", func(t *testing.T) {
		query := backend.DataQuery{
			RefID:         "A",
			QueryType:     "",
			MaxDataPoints: 1055,
			Interval:      15 * time.Second,
			TimeRange: backend.TimeRange{
				From: now,
				To:   now.Add(5 * time.Minute),
			},
			JSON: []byte(`{
				"datasource": {
					"type": "prometheus",
					"uid": "2z9d6ElGk"
				},
				"editorMode": "code",
				"expr": "sum(rate(cache_requests_total[$__rate_interval]))",
				"legendFormat": "__auto",
				"range": true,
				"refId": "A",
				"exemplar": false,
				"requestId": "1A",
				"utcOffsetSec": 0,
				"interval": "",
				"datasourceId": 508,
				"intervalMs": 15000,
				"maxDataPoints": 1055
			}`),
		}
		res, err := models.Parse(context.Background(), log.New(), span, query, "15s", intervalCalculator, false)
		require.NoError(t, err)
		require.Equal(t, "sum(rate(cache_requests_total[1m0s]))", res.Expr)
		require.Equal(t, 15*time.Second, res.Step)
	})
}
|
||||
|
||||
func mockQuery(expr string, interval string, intervalMs int64, timeRange *backend.TimeRange) backend.DataQuery {
|
||||
if timeRange == nil {
|
||||
timeRange = &backend.TimeRange{
|
||||
From: now,
|
||||
To: now.Add(1 * time.Hour),
|
||||
}
|
||||
}
|
||||
return backend.DataQuery{
|
||||
Interval: time.Duration(intervalMs) * time.Millisecond,
|
||||
JSON: []byte(fmt.Sprintf(`{
|
||||
"expr": "%s",
|
||||
"format": "time_series",
|
||||
"interval": "%s",
|
||||
"intervalMs": %v,
|
||||
"intervalFactor": 1,
|
||||
"refId": "A"
|
||||
}`, expr, interval, intervalMs)),
|
||||
TimeRange: *timeRange,
|
||||
RefID: "A",
|
||||
}
|
||||
}
|
||||
|
||||
func queryContext(json string, timeRange backend.TimeRange, queryInterval time.Duration) backend.DataQuery {
|
||||
return backend.DataQuery{
|
||||
Interval: queryInterval,
|
||||
@@ -768,11 +453,6 @@ func queryContext(json string, timeRange backend.TimeRange, queryInterval time.D
|
||||
}
|
||||
}
|
||||
|
||||
// AlignTimeRange aligns query range to step and handles the time offset.
|
||||
// It rounds start and end down to a multiple of step.
|
||||
// Prometheus caching is dependent on the range being aligned with the step.
|
||||
// Rounding to the step can significantly change the start and end of the range for larger steps, i.e. a week.
|
||||
// In rounding the range to a 1w step the range will always start on a Thursday.
|
||||
func TestAlignTimeRange(t *testing.T) {
|
||||
type args struct {
|
||||
t time.Time
|
||||
|
||||
@@ -381,6 +381,102 @@ func TestPrometheus_parseTimeSeriesResponse(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
func TestPrometheus_executedQueryString(t *testing.T) {
|
||||
t.Run("executedQueryString should match expected format with intervalMs 300_000", func(t *testing.T) {
|
||||
values := []p.SamplePair{
|
||||
{Value: 1, Timestamp: 1000},
|
||||
{Value: 2, Timestamp: 2000},
|
||||
}
|
||||
result := queryResult{
|
||||
Type: p.ValMatrix,
|
||||
Result: p.Matrix{
|
||||
&p.SampleStream{
|
||||
Metric: p.Metric{"app": "Application"},
|
||||
Values: values,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
queryJSON := `{
|
||||
"expr": "test_metric",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"interval": "2m",
|
||||
"intervalMs": 300000,
|
||||
"maxDataPoints": 761,
|
||||
"refId": "A",
|
||||
"range": true
|
||||
}`
|
||||
|
||||
now := time.Now()
|
||||
query := backend.DataQuery{
|
||||
RefID: "A",
|
||||
MaxDataPoints: 761,
|
||||
Interval: 300000 * time.Millisecond,
|
||||
TimeRange: backend.TimeRange{
|
||||
From: now,
|
||||
To: now.Add(48 * time.Hour),
|
||||
},
|
||||
JSON: []byte(queryJSON),
|
||||
}
|
||||
tctx, err := setup()
|
||||
require.NoError(t, err)
|
||||
res, err := execute(tctx, query, result, nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Len(t, res, 1)
|
||||
require.NotNil(t, res[0].Meta)
|
||||
require.Equal(t, "Expr: test_metric\nStep: 2m0s", res[0].Meta.ExecutedQueryString)
|
||||
})
|
||||
|
||||
t.Run("executedQueryString should match expected format with intervalMs 900_000", func(t *testing.T) {
|
||||
values := []p.SamplePair{
|
||||
{Value: 1, Timestamp: 1000},
|
||||
{Value: 2, Timestamp: 2000},
|
||||
}
|
||||
result := queryResult{
|
||||
Type: p.ValMatrix,
|
||||
Result: p.Matrix{
|
||||
&p.SampleStream{
|
||||
Metric: p.Metric{"app": "Application"},
|
||||
Values: values,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
queryJSON := `{
|
||||
"expr": "test_metric",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"interval": "2m",
|
||||
"intervalMs": 900000,
|
||||
"maxDataPoints": 175,
|
||||
"refId": "A",
|
||||
"range": true
|
||||
}`
|
||||
|
||||
now := time.Now()
|
||||
query := backend.DataQuery{
|
||||
RefID: "A",
|
||||
MaxDataPoints: 175,
|
||||
Interval: 900000 * time.Millisecond,
|
||||
TimeRange: backend.TimeRange{
|
||||
From: now,
|
||||
To: now.Add(48 * time.Hour),
|
||||
},
|
||||
JSON: []byte(queryJSON),
|
||||
}
|
||||
tctx, err := setup()
|
||||
require.NoError(t, err)
|
||||
res, err := execute(tctx, query, result, nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Len(t, res, 1)
|
||||
require.NotNil(t, res[0].Meta)
|
||||
require.Equal(t, "Expr: test_metric\nStep: 2m0s", res[0].Meta.ExecutedQueryString)
|
||||
})
|
||||
}
|
||||
|
||||
type queryResult struct {
|
||||
Type p.ValueType `json:"resultType"`
|
||||
Result any `json:"result"`
|
||||
|
||||
Reference in New Issue
Block a user