// NOTE: The following change-log text and page metadata were captured with
// this file during extraction; they are preserved here as a comment so the
// file remains valid Go.
//
// * Moving things around
// * Update frontend to support CUA
// * Add CUA support to backend
// * Copy parseURL function to where it's used
// * Update test
// * Remove experimental-strip-types
// * Docs
// * A bit more of a refactor to reduce complexity
// * Revert "Remove experimental-strip-types"
//   This reverts commit 70fbc1c0cd.
// * Review
// * Docs updates
// * Another docs fix
//
// (706 lines, 21 KiB, Go)
package sqleng

import (
	"encoding/json"
	"fmt"
	"net"
	"os"
	"testing"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/grafana/grafana/pkg/tsdb/mssql/kerberos"
	"github.com/grafana/grafana/pkg/tsdb/mssql/sqleng/util"
)
func TestSQLEngine(t *testing.T) {
|
|
dt := time.Date(2018, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)
|
|
|
|
t.Run("Handle interpolating $__interval and $__interval_ms", func(t *testing.T) {
|
|
from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC)
|
|
to := from.Add(5 * time.Minute)
|
|
timeRange := backend.TimeRange{From: from, To: to}
|
|
|
|
text := "$__interval $__timeGroupAlias(time,$__interval) $__interval_ms"
|
|
|
|
t.Run("interpolate 10 minutes $__interval", func(t *testing.T) {
|
|
query := backend.DataQuery{JSON: []byte("{}"), MaxDataPoints: 1500, Interval: time.Minute * 10}
|
|
sql := Interpolate(query, timeRange, "", text)
|
|
require.Equal(t, "10m $__timeGroupAlias(time,10m) 600000", sql)
|
|
})
|
|
|
|
t.Run("interpolate 4seconds $__interval", func(t *testing.T) {
|
|
query := backend.DataQuery{JSON: []byte("{}"), MaxDataPoints: 1500, Interval: time.Second * 4}
|
|
sql := Interpolate(query, timeRange, "", text)
|
|
require.Equal(t, "4s $__timeGroupAlias(time,4s) 4000", sql)
|
|
})
|
|
|
|
t.Run("interpolate 200 milliseconds $__interval", func(t *testing.T) {
|
|
query := backend.DataQuery{JSON: []byte("{}"), MaxDataPoints: 1500, Interval: time.Millisecond * 200}
|
|
sql := Interpolate(query, timeRange, "", text)
|
|
require.Equal(t, "200ms $__timeGroupAlias(time,200ms) 200", sql)
|
|
})
|
|
})
|
|
|
|
t.Run("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func(t *testing.T) {
|
|
from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC)
|
|
to := from.Add(5 * time.Minute)
|
|
timeRange := backend.TimeRange{From: from, To: to}
|
|
query := backend.DataQuery{JSON: []byte("{}"), MaxDataPoints: 1500, Interval: time.Second * 60}
|
|
|
|
t.Run("interpolate __unixEpochFrom function", func(t *testing.T) {
|
|
sql := Interpolate(query, timeRange, "", "select $__unixEpochFrom()")
|
|
require.Equal(t, fmt.Sprintf("select %d", from.Unix()), sql)
|
|
})
|
|
|
|
t.Run("interpolate __unixEpochTo function", func(t *testing.T) {
|
|
sql := Interpolate(query, timeRange, "", "select $__unixEpochTo()")
|
|
require.Equal(t, fmt.Sprintf("select %d", to.Unix()), sql)
|
|
})
|
|
})
|
|
|
|
t.Run("Given row values with int64 as time columns", func(t *testing.T) {
|
|
tSeconds := dt.Unix()
|
|
tMilliseconds := dt.UnixNano() / 1e6
|
|
tNanoSeconds := dt.UnixNano()
|
|
var nilPointer *int64
|
|
|
|
originFrame := data.NewFrame("",
|
|
data.NewField("time1", nil, []int64{
|
|
tSeconds,
|
|
}),
|
|
data.NewField("time2", nil, []*int64{
|
|
util.Pointer(tSeconds),
|
|
}),
|
|
data.NewField("time3", nil, []int64{
|
|
tMilliseconds,
|
|
}),
|
|
data.NewField("time4", nil, []*int64{
|
|
util.Pointer(tMilliseconds),
|
|
}),
|
|
data.NewField("time5", nil, []int64{
|
|
tNanoSeconds,
|
|
}),
|
|
data.NewField("time6", nil, []*int64{
|
|
util.Pointer(tNanoSeconds),
|
|
}),
|
|
data.NewField("time7", nil, []*int64{
|
|
nilPointer,
|
|
}),
|
|
)
|
|
|
|
for i := 0; i < len(originFrame.Fields); i++ {
|
|
err := convertSQLTimeColumnToEpochMS(originFrame, i)
|
|
require.NoError(t, err)
|
|
}
|
|
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[2].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[3].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[4].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[5].At(0).(*time.Time)).Unix())
|
|
require.Nil(t, originFrame.Fields[6].At(0))
|
|
})
|
|
|
|
t.Run("Given row values with uint64 as time columns", func(t *testing.T) {
|
|
tSeconds := uint64(dt.Unix())
|
|
tMilliseconds := uint64(dt.UnixNano() / 1e6)
|
|
tNanoSeconds := uint64(dt.UnixNano())
|
|
var nilPointer *uint64
|
|
|
|
originFrame := data.NewFrame("",
|
|
data.NewField("time1", nil, []uint64{
|
|
tSeconds,
|
|
}),
|
|
data.NewField("time2", nil, []*uint64{
|
|
util.Pointer(tSeconds),
|
|
}),
|
|
data.NewField("time3", nil, []uint64{
|
|
tMilliseconds,
|
|
}),
|
|
data.NewField("time4", nil, []*uint64{
|
|
util.Pointer(tMilliseconds),
|
|
}),
|
|
data.NewField("time5", nil, []uint64{
|
|
tNanoSeconds,
|
|
}),
|
|
data.NewField("time6", nil, []*uint64{
|
|
util.Pointer(tNanoSeconds),
|
|
}),
|
|
data.NewField("time7", nil, []*uint64{
|
|
nilPointer,
|
|
}),
|
|
)
|
|
|
|
for i := 0; i < len(originFrame.Fields); i++ {
|
|
err := convertSQLTimeColumnToEpochMS(originFrame, i)
|
|
require.NoError(t, err)
|
|
}
|
|
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[2].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[3].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[4].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[5].At(0).(*time.Time)).Unix())
|
|
require.Nil(t, originFrame.Fields[6].At(0))
|
|
})
|
|
|
|
t.Run("Given row values with int32 as time columns", func(t *testing.T) {
|
|
tSeconds := int32(dt.Unix())
|
|
var nilInt *int32
|
|
|
|
originFrame := data.NewFrame("",
|
|
data.NewField("time1", nil, []int32{
|
|
tSeconds,
|
|
}),
|
|
data.NewField("time2", nil, []*int32{
|
|
util.Pointer(tSeconds),
|
|
}),
|
|
data.NewField("time7", nil, []*int32{
|
|
nilInt,
|
|
}),
|
|
)
|
|
for i := 0; i < 3; i++ {
|
|
err := convertSQLTimeColumnToEpochMS(originFrame, i)
|
|
require.NoError(t, err)
|
|
}
|
|
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
|
|
require.Nil(t, originFrame.Fields[2].At(0))
|
|
})
|
|
|
|
t.Run("Given row values with uint32 as time columns", func(t *testing.T) {
|
|
tSeconds := uint32(dt.Unix())
|
|
var nilInt *uint32
|
|
|
|
originFrame := data.NewFrame("",
|
|
data.NewField("time1", nil, []uint32{
|
|
tSeconds,
|
|
}),
|
|
data.NewField("time2", nil, []*uint32{
|
|
util.Pointer(tSeconds),
|
|
}),
|
|
data.NewField("time7", nil, []*uint32{
|
|
nilInt,
|
|
}),
|
|
)
|
|
for i := 0; i < len(originFrame.Fields); i++ {
|
|
err := convertSQLTimeColumnToEpochMS(originFrame, i)
|
|
require.NoError(t, err)
|
|
}
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
|
|
require.Nil(t, originFrame.Fields[2].At(0))
|
|
})
|
|
|
|
t.Run("Given row values with float64 as time columns", func(t *testing.T) {
|
|
tSeconds := float64(dt.UnixNano()) / float64(time.Second)
|
|
tMilliseconds := float64(dt.UnixNano()) / float64(time.Millisecond)
|
|
tNanoSeconds := float64(dt.UnixNano())
|
|
var nilPointer *float64
|
|
|
|
originFrame := data.NewFrame("",
|
|
data.NewField("time1", nil, []float64{
|
|
tSeconds,
|
|
}),
|
|
data.NewField("time2", nil, []*float64{
|
|
util.Pointer(tSeconds),
|
|
}),
|
|
data.NewField("time3", nil, []float64{
|
|
tMilliseconds,
|
|
}),
|
|
data.NewField("time4", nil, []*float64{
|
|
util.Pointer(tMilliseconds),
|
|
}),
|
|
data.NewField("time5", nil, []float64{
|
|
tNanoSeconds,
|
|
}),
|
|
data.NewField("time6", nil, []*float64{
|
|
util.Pointer(tNanoSeconds),
|
|
}),
|
|
data.NewField("time7", nil, []*float64{
|
|
nilPointer,
|
|
}),
|
|
)
|
|
|
|
for i := 0; i < len(originFrame.Fields); i++ {
|
|
err := convertSQLTimeColumnToEpochMS(originFrame, i)
|
|
require.NoError(t, err)
|
|
}
|
|
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[2].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[3].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[4].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, dt.Unix(), (*originFrame.Fields[5].At(0).(*time.Time)).Unix())
|
|
require.Nil(t, originFrame.Fields[6].At(0))
|
|
})
|
|
|
|
t.Run("Given row values with float32 as time columns", func(t *testing.T) {
|
|
tSeconds := float32(dt.Unix())
|
|
var nilInt *float32
|
|
|
|
originFrame := data.NewFrame("",
|
|
data.NewField("time1", nil, []float32{
|
|
tSeconds,
|
|
}),
|
|
data.NewField("time2", nil, []*float32{
|
|
util.Pointer(tSeconds),
|
|
}),
|
|
data.NewField("time7", nil, []*float32{
|
|
nilInt,
|
|
}),
|
|
)
|
|
for i := 0; i < len(originFrame.Fields); i++ {
|
|
err := convertSQLTimeColumnToEpochMS(originFrame, i)
|
|
require.NoError(t, err)
|
|
}
|
|
require.Equal(t, int64(tSeconds), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
|
|
require.Equal(t, int64(tSeconds), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
|
|
require.Nil(t, originFrame.Fields[2].At(0))
|
|
})
|
|
|
|
t.Run("Given row with value columns, would be converted to float64", func(t *testing.T) {
|
|
originFrame := data.NewFrame("",
|
|
data.NewField("value1", nil, []int64{
|
|
int64(1),
|
|
}),
|
|
data.NewField("value2", nil, []*int64{
|
|
util.Pointer(int64(1)),
|
|
}),
|
|
data.NewField("value3", nil, []int32{
|
|
int32(1),
|
|
}),
|
|
data.NewField("value4", nil, []*int32{
|
|
util.Pointer(int32(1)),
|
|
}),
|
|
data.NewField("value5", nil, []int16{
|
|
int16(1),
|
|
}),
|
|
data.NewField("value6", nil, []*int16{
|
|
util.Pointer(int16(1)),
|
|
}),
|
|
data.NewField("value7", nil, []int8{
|
|
int8(1),
|
|
}),
|
|
data.NewField("value8", nil, []*int8{
|
|
util.Pointer(int8(1)),
|
|
}),
|
|
data.NewField("value9", nil, []float64{
|
|
float64(1),
|
|
}),
|
|
data.NewField("value10", nil, []*float64{
|
|
util.Pointer(1.0),
|
|
}),
|
|
data.NewField("value11", nil, []float32{
|
|
float32(1),
|
|
}),
|
|
data.NewField("value12", nil, []*float32{
|
|
util.Pointer(float32(1)),
|
|
}),
|
|
data.NewField("value13", nil, []uint64{
|
|
uint64(1),
|
|
}),
|
|
data.NewField("value14", nil, []*uint64{
|
|
util.Pointer(uint64(1)),
|
|
}),
|
|
data.NewField("value15", nil, []uint32{
|
|
uint32(1),
|
|
}),
|
|
data.NewField("value16", nil, []*uint32{
|
|
util.Pointer(uint32(1)),
|
|
}),
|
|
data.NewField("value17", nil, []uint16{
|
|
uint16(1),
|
|
}),
|
|
data.NewField("value18", nil, []*uint16{
|
|
util.Pointer(uint16(1)),
|
|
}),
|
|
data.NewField("value19", nil, []uint8{
|
|
uint8(1),
|
|
}),
|
|
data.NewField("value20", nil, []*uint8{
|
|
util.Pointer(uint8(1)),
|
|
}),
|
|
)
|
|
for i := 0; i < len(originFrame.Fields); i++ {
|
|
_, err := convertSQLValueColumnToFloat(originFrame, i)
|
|
require.NoError(t, err)
|
|
if i == 8 {
|
|
require.Equal(t, float64(1), originFrame.Fields[i].At(0).(float64))
|
|
} else {
|
|
require.NotNil(t, originFrame.Fields[i].At(0).(*float64))
|
|
require.Equal(t, float64(1), *originFrame.Fields[i].At(0).(*float64))
|
|
}
|
|
}
|
|
})
|
|
|
|
t.Run("Given row with nil value columns", func(t *testing.T) {
|
|
var int64NilPointer *int64
|
|
var int32NilPointer *int32
|
|
var int16NilPointer *int16
|
|
var int8NilPointer *int8
|
|
var float64NilPointer *float64
|
|
var float32NilPointer *float32
|
|
var uint64NilPointer *uint64
|
|
var uint32NilPointer *uint32
|
|
var uint16NilPointer *uint16
|
|
var uint8NilPointer *uint8
|
|
|
|
originFrame := data.NewFrame("",
|
|
data.NewField("value1", nil, []*int64{
|
|
int64NilPointer,
|
|
}),
|
|
data.NewField("value2", nil, []*int32{
|
|
int32NilPointer,
|
|
}),
|
|
data.NewField("value3", nil, []*int16{
|
|
int16NilPointer,
|
|
}),
|
|
data.NewField("value4", nil, []*int8{
|
|
int8NilPointer,
|
|
}),
|
|
data.NewField("value5", nil, []*float64{
|
|
float64NilPointer,
|
|
}),
|
|
data.NewField("value6", nil, []*float32{
|
|
float32NilPointer,
|
|
}),
|
|
data.NewField("value7", nil, []*uint64{
|
|
uint64NilPointer,
|
|
}),
|
|
data.NewField("value8", nil, []*uint32{
|
|
uint32NilPointer,
|
|
}),
|
|
data.NewField("value9", nil, []*uint16{
|
|
uint16NilPointer,
|
|
}),
|
|
data.NewField("value10", nil, []*uint8{
|
|
uint8NilPointer,
|
|
}),
|
|
)
|
|
for i := 0; i < len(originFrame.Fields); i++ {
|
|
t.Run("", func(t *testing.T) {
|
|
_, err := convertSQLValueColumnToFloat(originFrame, i)
|
|
require.NoError(t, err)
|
|
require.Nil(t, originFrame.Fields[i].At(0))
|
|
})
|
|
}
|
|
})
|
|
|
|
t.Run("Should not return raw connection errors", func(t *testing.T) {
|
|
err := net.OpError{Op: "Dial", Err: fmt.Errorf("inner-error")}
|
|
transformer := &testQueryResultTransformer{}
|
|
dp := DataSourceHandler{
|
|
log: backend.NewLoggerWith("logger", "test"),
|
|
queryResultTransformer: transformer,
|
|
}
|
|
resultErr := dp.TransformQueryError(dp.log, &err)
|
|
assert.False(t, transformer.transformQueryErrorWasCalled)
|
|
errorText := resultErr.Error()
|
|
assert.NotEqual(t, err, resultErr)
|
|
assert.NotContains(t, errorText, "inner-error")
|
|
assert.Contains(t, errorText, "failed to connect to server")
|
|
})
|
|
|
|
t.Run("Should return non-connection errors unmodified", func(t *testing.T) {
|
|
err := fmt.Errorf("normal error")
|
|
transformer := &testQueryResultTransformer{}
|
|
dp := DataSourceHandler{
|
|
log: backend.NewLoggerWith("logger", "test"),
|
|
queryResultTransformer: transformer,
|
|
}
|
|
resultErr := dp.TransformQueryError(dp.log, err)
|
|
assert.True(t, transformer.transformQueryErrorWasCalled)
|
|
assert.Equal(t, err, resultErr)
|
|
assert.ErrorIs(t, err, resultErr)
|
|
})
|
|
}
|
|
|
|
// testQueryResultTransformer is a stub query-result transformer used by the
// tests above; it records whether TransformQueryError was invoked.
type testQueryResultTransformer struct {
	// transformQueryErrorWasCalled is set to true the first time
	// TransformQueryError is called.
	transformQueryErrorWasCalled bool
}
func (t *testQueryResultTransformer) TransformQueryError(_ log.Logger, err error) error {
|
|
t.transformQueryErrorWasCalled = true
|
|
return err
|
|
}
|
|
|
|
func (t *testQueryResultTransformer) GetConverterList() []sqlutil.StringConverter {
|
|
return nil
|
|
}
|
|
|
|
func genTempCacheFile(t *testing.T, lookups []kerberos.KerberosLookup) string {
|
|
content, err := json.Marshal(lookups)
|
|
if err != nil {
|
|
t.Fatalf("Unable to marshall json for temp lookup: %v", err)
|
|
}
|
|
|
|
tmpFile, err := os.CreateTemp("", "lookup*.json")
|
|
if err != nil {
|
|
t.Fatalf("Unable to create temporary file for temp lookup: %v", err)
|
|
}
|
|
|
|
if _, err := tmpFile.Write(content); err != nil {
|
|
t.Fatalf("Unable to write to temporary file for temp lookup: %v", err)
|
|
}
|
|
|
|
return tmpFile.Name()
|
|
}
|
|
|
|
func TestGenerateConnectionString(t *testing.T) {
|
|
kerberosLookup := []kerberos.KerberosLookup{
|
|
{
|
|
Address: "example.host",
|
|
DBName: "testDB",
|
|
User: "testUser",
|
|
CredentialCacheFilename: "/tmp/cache",
|
|
},
|
|
}
|
|
tmpFile := genTempCacheFile(t, kerberosLookup)
|
|
defer func() {
|
|
err := os.Remove(tmpFile)
|
|
if err != nil {
|
|
t.Log(err)
|
|
}
|
|
}()
|
|
|
|
testCases := []struct {
|
|
desc string
|
|
kerberosCfg kerberos.KerberosAuth
|
|
dataSource DataSourceInfo
|
|
expConnStr string
|
|
}{
|
|
{
|
|
desc: "Use Kerberos Credential Cache",
|
|
kerberosCfg: kerberos.KerberosAuth{
|
|
CredentialCache: "/tmp/krb5cc_1000",
|
|
ConfigFilePath: "/etc/krb5.conf",
|
|
UDPConnectionLimit: 1,
|
|
},
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost",
|
|
Database: "database",
|
|
JsonData: JsonData{
|
|
AuthenticationType: "Windows AD: Credential cache",
|
|
},
|
|
},
|
|
expConnStr: "authenticator=krb5;krb5-configfile=/etc/krb5.conf;server=localhost;database=database;krb5-credcachefile=/tmp/krb5cc_1000;",
|
|
},
|
|
{
|
|
desc: "Use Kerberos Credential Cache File path",
|
|
kerberosCfg: kerberos.KerberosAuth{
|
|
CredentialCacheLookupFile: tmpFile,
|
|
ConfigFilePath: "/etc/krb5.conf",
|
|
UDPConnectionLimit: 1,
|
|
},
|
|
dataSource: DataSourceInfo{
|
|
URL: "example.host",
|
|
Database: "testDB",
|
|
User: "testUser",
|
|
JsonData: JsonData{
|
|
AuthenticationType: "Windows AD: Credential cache file",
|
|
},
|
|
},
|
|
expConnStr: "authenticator=krb5;krb5-configfile=/etc/krb5.conf;server=example.host;database=testDB;krb5-credcachefile=/tmp/cache;",
|
|
},
|
|
{
|
|
desc: "Use Kerberos Keytab",
|
|
kerberosCfg: kerberos.KerberosAuth{
|
|
KeytabFilePath: "/foo/bar.keytab",
|
|
ConfigFilePath: "/etc/krb5.conf",
|
|
UDPConnectionLimit: 1,
|
|
},
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost",
|
|
Database: "database",
|
|
User: "foo@test.lab",
|
|
JsonData: JsonData{
|
|
AuthenticationType: "Windows AD: Keytab",
|
|
},
|
|
},
|
|
expConnStr: "authenticator=krb5;krb5-configfile=/etc/krb5.conf;server=localhost;database=database;user id=foo@test.lab;krb5-keytabfile=/foo/bar.keytab;",
|
|
},
|
|
{
|
|
desc: "Use Kerberos Username and Password",
|
|
kerberosCfg: kerberos.KerberosAuth{
|
|
ConfigFilePath: "/etc/krb5.conf",
|
|
UDPConnectionLimit: 1,
|
|
},
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost",
|
|
Database: "database",
|
|
User: "foo@test.lab",
|
|
DecryptedSecureJSONData: map[string]string{
|
|
"password": "foo",
|
|
},
|
|
JsonData: JsonData{
|
|
AuthenticationType: "Windows AD: Username + password",
|
|
},
|
|
},
|
|
expConnStr: "authenticator=krb5;krb5-configfile=/etc/krb5.conf;server=localhost;database=database;user id=foo@test.lab;password=foo;",
|
|
},
|
|
{
|
|
desc: "Use non-default UDP connection limit",
|
|
kerberosCfg: kerberos.KerberosAuth{
|
|
ConfigFilePath: "/etc/krb5.conf",
|
|
UDPConnectionLimit: 0,
|
|
},
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost",
|
|
Database: "database",
|
|
User: "foo@test.lab",
|
|
DecryptedSecureJSONData: map[string]string{
|
|
"password": "foo",
|
|
},
|
|
JsonData: JsonData{
|
|
AuthenticationType: "Windows AD: Username + password",
|
|
},
|
|
},
|
|
expConnStr: "authenticator=krb5;krb5-configfile=/etc/krb5.conf;server=localhost;database=database;user id=foo@test.lab;password=foo;krb5-udppreferencelimit=0;",
|
|
},
|
|
|
|
{
|
|
desc: "From URL w/ port",
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost:1001",
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost;database=database;user id=user;password=;port=1001;",
|
|
},
|
|
// When no port is specified, the driver should be allowed to choose
|
|
{
|
|
desc: "From URL w/o port",
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost",
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost;database=database;user id=user;password=;",
|
|
},
|
|
// Port 0 should be equivalent to not specifying a port, i.e. let the driver choose
|
|
{
|
|
desc: "From URL w port 0",
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost:0",
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost;database=database;user id=user;password=;",
|
|
},
|
|
{
|
|
desc: "With instance name",
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost\\instance",
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost\\instance;database=database;user id=user;password=;",
|
|
},
|
|
{
|
|
desc: "With instance name and port",
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost\\instance:333",
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost\\instance;database=database;user id=user;password=;port=333;",
|
|
},
|
|
{
|
|
desc: "With instance name and ApplicationIntent",
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost\\instance;ApplicationIntent=ReadOnly",
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost\\instance;ApplicationIntent=ReadOnly;database=database;user id=user;password=;",
|
|
},
|
|
{
|
|
desc: "With ApplicationIntent instance name and port",
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost\\instance:333;ApplicationIntent=ReadOnly",
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost\\instance;database=database;user id=user;password=;port=333;ApplicationIntent=ReadOnly;",
|
|
},
|
|
{
|
|
desc: "With instance name",
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost\\instance",
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost\\instance;database=database;user id=user;password=;",
|
|
},
|
|
{
|
|
desc: "With instance name and port",
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost\\instance:333",
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost\\instance;database=database;user id=user;password=;port=333;",
|
|
},
|
|
{
|
|
desc: "With instance name and ApplicationIntent",
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost\\instance;ApplicationIntent=ReadOnly",
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost\\instance;ApplicationIntent=ReadOnly;database=database;user id=user;password=;",
|
|
},
|
|
{
|
|
desc: "With ApplicationIntent instance name and port",
|
|
dataSource: DataSourceInfo{
|
|
URL: "localhost\\instance:333;ApplicationIntent=ReadOnly",
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost\\instance;database=database;user id=user;password=;port=333;ApplicationIntent=ReadOnly;",
|
|
},
|
|
{
|
|
desc: "Defaults",
|
|
dataSource: DataSourceInfo{
|
|
Database: "database",
|
|
User: "user",
|
|
JsonData: JsonData{},
|
|
},
|
|
expConnStr: "server=localhost;database=database;user id=user;password=;",
|
|
},
|
|
}
|
|
|
|
logger := backend.NewLoggerWith("logger", "mssql.test")
|
|
|
|
for _, tc := range testCases {
|
|
t.Run(tc.desc, func(t *testing.T) {
|
|
connStr, err := generateConnectionString(tc.dataSource, nil, tc.kerberosCfg, logger, nil, "")
|
|
require.NoError(t, err)
|
|
assert.Equal(t, tc.expConnStr, connStr)
|
|
})
|
|
}
|
|
}
|