grafana/pkg/tsdb/testdatasource/csv_data.go
Will Browne b80fbe03f0 Plugins: Refactor Plugin Management (#40477)
* add core plugin flow

* add instrumentation

* move func

* remove cruft

* support external backend plugins

* refactor + clean up

* remove comments

* refactor loader

* simplify core plugin path arg

* cleanup loggers

* move signature validator to plugins package

* fix sig packaging

* cleanup plugin model

* remove unnecessary plugin field

* add start+stop for pm

* fix failures

* add decommissioned state

* export fields just to get things flowing

* fix comments

* set static routes

* make image loading idempotent

* merge with backend plugin manager

* re-use funcs

* reorder imports + remove unnecessary interface

* add some TODOs + remove unused func

* remove unused instrumentation func

* simplify client usage

* remove import alias

* re-use backendplugin.Plugin interface

* re-order funcs

* improve var name

* fix log statements

* refactor data model

* add logic for dupe check during loading

* cleanup state setting

* refactor loader

* cleanup manager interface

* add rendering flow

* refactor loading + init

* add renderer support

* fix renderer plugin

* reformat imports

* track errors

* fix plugin signature inheritance

* name param in interface

* update func comment

* fix func arg name

* introduce class concept

* remove func

* fix external plugin check

* apply changes from pm-experiment

* fix core plugins

* fix imports

* rename interface

* comment API interface

* add support for testdata plugin

* enable alerting + use correct core plugin contracts

* slim manager API

* fix param name

* fix filter

* support static routes

* fix rendering

* tidy rendering

* get tests compiling

* fix install+uninstall

* start finder test

* add finder test coverage

* start loader tests

* add test for core plugins

* load core + bundled test

* add test for nested plugin loading

* add test files

* clean interface + fix registering some core plugins

* refactoring

* reformat and create sub packages

* simplify core plugin init

* fix ctx cancel scenario

* migrate initializer

* remove Init() funcs

* add test starter

* new logger

* flesh out initializer tests

* refactoring

* remove unused svc

* refactor rendering flow

* fixup loader tests

* add enabled helper func

* fix logger name

* fix data fetchers

* fix case where plugin dir doesn't exist

* improve coverage + move dupe checking to loader

* remove noisy debug logs

* register core plugins automagically

* add support for renderer in catalog

* make private func + fix req validation

* use interface

* re-add check for renderer in catalog

* tidy up from moving to auto reg core plugins

* core plugin registrar

* guards

* copy over core plugins for test infra

* all tests green

* renames

* propagate new interfaces

* kill old manager

* get compiling

* tidy up

* update naming

* refactor manager test + cleanup

* add more cases to finder test

* migrate validator to field

* more coverage

* refactor dupe checking

* add test for plugin class

* add coverage for initializer

* split out rendering

* move

* fixup tests

* fix uss test

* fix frontend settings

* fix grafanads test

* add check when checking sig errors

* fix enabled map

* fixup

* allow manual setup of CM

* rename to cloud-monitoring

* remove TODO

* add installer interface for testing

* loader interface returns

* tests passing

* refactor + add more coverage

* support 'stackdriver'

* fix frontend settings loading

* improve naming based on package name

* small tidy

* refactor test

* fix renderer start

* make cloud-monitoring plugin ID clearer

* add plugin update test

* add integration tests

* don't break all if sig can't be calculated

* add root URL check test

* add more signature verification tests

* update DTO name

* update enabled plugins comment

* update comments

* fix linter

* revert fe naming change

* fix errors endpoint

* reset error code field name

* re-order test to help verify

* assert -> require

* pm check

* add missing entry + re-order

* re-check

* dump icon log

* verify manager contents first

* reformat

* apply PR feedback

* apply style changes

* fix one vs all loading err

* improve log output

* only start when no signature error

* move log

* rework plugin update check

* fix test

* fix multi loading from cfg.PluginSettings

* improve log output #2

* add error abstraction to capture errors without registering a plugin

* add debug log

* add unsigned warning

* e2e test attempt

* fix logger

* set home path

* prevent panic

* alternate

* ugh.. fix home path

* return renderer even if not started

* make renderer plugin managed

* add fallback renderer icon, update renderer badge + prevent changes when renderer is installed

* fix icon loading

* rollback renderer changes

* use correct field

* remove unnecessary block

* remove newline

* remove unused func

* fix bundled plugins base + module fields

* remove unused field since refactor

* add authorizer abstraction

* loader only returns plugins expected to run

* fix multi log output
2021-11-01 10:53:33 +01:00

278 lines
6.5 KiB
Go

package testdatasource

import (
	"context"
	"encoding/csv"
	"errors"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"

	"github.com/grafana/grafana/pkg/components/simplejson"
)

// handleCsvContentScenario parses inline CSV content from each query and
// converts it into a data frame.
func (s *Service) handleCsvContentScenario(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	resp := backend.NewQueryDataResponse()

	for _, q := range req.Queries {
		model, err := simplejson.NewJson(q.JSON)
		if err != nil {
			return nil, fmt.Errorf("failed to parse query json: %v", err)
		}

		csvContent := model.Get("csvContent").MustString()
		alias := model.Get("alias").MustString("")

		frame, err := LoadCsvContent(strings.NewReader(csvContent), alias)
		if err != nil {
			return nil, err
		}

		respD := resp.Responses[q.RefID]
		respD.Frames = append(respD.Frames, frame)
		resp.Responses[q.RefID] = respD
	}

	return resp, nil
}
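
// As a hypothetical illustration (not part of the original file), the query
// JSON consumed above could look like this; the "csvContent" and "alias" keys
// are the ones the handler reads, while the values are made up:
//
//	{
//	  "csvContent": "time,value\n2021-11-01T10:53:33Z,3.14",
//	  "alias": "my series"
//	}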

// handleCsvFileScenario loads a CSV file from disk for each query and
// converts it into a data frame. Queries without a file name are skipped.
func (s *Service) handleCsvFileScenario(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	resp := backend.NewQueryDataResponse()

	for _, q := range req.Queries {
		model, err := simplejson.NewJson(q.JSON)
		if err != nil {
			return nil, fmt.Errorf("failed to parse query json: %v", err)
		}

		fileName := model.Get("csvFileName").MustString()
		if len(fileName) == 0 {
			continue
		}

		frame, err := s.loadCsvFile(fileName)
		if err != nil {
			return nil, err
		}

		respD := resp.Responses[q.RefID]
		respD.Frames = append(respD.Frames, frame)
		resp.Responses[q.RefID] = respD
	}

	return resp, nil
}

// loadCsvFile reads a CSV file from the testdata directory. The file name is
// validated against an anchored pattern before the path is built, so only
// simple names such as "population.csv" are accepted and path traversal via
// names like "../secret/population.csv" is rejected.
func (s *Service) loadCsvFile(fileName string) (*data.Frame, error) {
	validFileName := regexp.MustCompile(`^\w+\.csv$`)
	if !validFileName.MatchString(fileName) {
		return nil, fmt.Errorf("invalid csv file name: %q", fileName)
	}

	filePath := filepath.Join(s.cfg.StaticRootPath, "testdata", fileName)

	// Can ignore gosec G304 here, because we check the file pattern above
	// nolint:gosec
	fileReader, err := os.Open(filePath)
	if err != nil {
		return nil, fmt.Errorf("failed to open file: %v", err)
	}

	defer func() {
		if err := fileReader.Close(); err != nil {
			s.logger.Warn("Failed to close file", "err", err, "path", fileName)
		}
	}()

	return LoadCsvContent(fileReader, fileName)
}

// LoadCsvContent should be moved to the SDK
func LoadCsvContent(ioReader io.Reader, name string) (*data.Frame, error) {
	reader := csv.NewReader(ioReader)

	// Read the header record; each column becomes a field.
	headerFields, err := reader.Read()
	if err != nil {
		return nil, fmt.Errorf("failed to read header line: %v", err)
	}

	fields := []*data.Field{}
	fieldNames := []string{}
	fieldRawValues := [][]string{}

	for _, fieldName := range headerFields {
		fieldNames = append(fieldNames, strings.Trim(fieldName, " "))
		fieldRawValues = append(fieldRawValues, []string{})
	}

	// Collect the raw string values column by column.
	for {
		lineValues, err := reader.Read()
		if errors.Is(err, io.EOF) {
			break // reached end of the file
		} else if err != nil {
			return nil, fmt.Errorf("failed to read line: %v", err)
		}

		for fieldIndex, value := range lineValues {
			fieldRawValues[fieldIndex] = append(fieldRawValues[fieldIndex], strings.Trim(value, " "))
		}
	}

	longest := 0

	for fieldIndex, rawValues := range fieldRawValues {
		fieldName := fieldNames[fieldIndex]

		field, err := csvValuesToField(rawValues)
		if err == nil {
			// Check if the values are actually a time field
			if strings.Contains(strings.ToLower(fieldName), "time") {
				timeField := toTimeField(field)
				if timeField != nil {
					field = timeField
				}
			}

			field.Name = fieldName
			fields = append(fields, field)

			if field.Len() > longest {
				longest = field.Len()
			}
		}
	}

	// Make all fields the same length by extending the shorter ones.
	for _, field := range fields {
		delta := longest - field.Len()
		if delta > 0 {
			field.Extend(delta)
		}
	}

	frame := data.NewFrame(name, fields...)
	return frame, nil
}

func csvLineToField(stringInput string) (*data.Field, error) {
	return csvValuesToField(strings.Split(strings.ReplaceAll(stringInput, " ", ""), ","))
}

// csvValuesToField infers a field type from the raw string values, trying
// booleans, then int64, then float64, and falling back to strings. The values
// "null" and "" are treated as null in every case.
func csvValuesToField(parts []string) (*data.Field, error) {
	if len(parts) < 1 {
		return nil, fmt.Errorf("csv must have at least one value")
	}

	first := strings.ToUpper(parts[0])
	if first == "T" || first == "F" || first == "TRUE" || first == "FALSE" {
		field := data.NewFieldFromFieldType(data.FieldTypeNullableBool, len(parts))
		for idx, strVal := range parts {
			strVal = strings.ToUpper(strVal)
			if strVal == "NULL" || strVal == "" {
				continue
			}
			field.SetConcrete(idx, strVal == "T" || strVal == "TRUE")
		}
		return field, nil
	}

	// Try parsing the values as integers
	ok := false
	field := data.NewFieldFromFieldType(data.FieldTypeNullableInt64, len(parts))
	for idx, strVal := range parts {
		if strVal == "null" || strVal == "" {
			continue
		}

		val, err := strconv.ParseInt(strVal, 10, 64)
		if err != nil {
			ok = false
			break
		}

		field.SetConcrete(idx, val)
		ok = true
	}
	if ok {
		return field, nil
	}

	// Maybe floats
	field = data.NewFieldFromFieldType(data.FieldTypeNullableFloat64, len(parts))
	for idx, strVal := range parts {
		if strVal == "null" || strVal == "" {
			continue
		}

		val, err := strconv.ParseFloat(strVal, 64)
		if err != nil {
			ok = false
			break
		}

		field.SetConcrete(idx, val)
		ok = true
	}
	if ok {
		return field, nil
	}

	// Fall back to strings, replacing empty strings with null
	field = data.NewFieldFromFieldType(data.FieldTypeNullableString, len(parts))
	for idx, strVal := range parts {
		if strVal == "null" || strVal == "" {
			continue
		}
		field.SetConcrete(idx, strVal)
	}
	return field, nil
}
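
// Illustrative examples (not from the original file) of the inference above:
//
//	["T", "null", "FALSE"] -> nullable bool:    true, null, false
//	["1", "2", ""]         -> nullable int64:   1, 2, null
//	["1", "2.5", "null"]   -> nullable float64: 1, 2.5, null
//	["a", "", "b"]         -> nullable string:  "a", null, "b"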

// toTimeField tries to convert the values in a field to timestamps. Numeric
// values are interpreted as Unix epoch milliseconds and strings as RFC 3339.
// It returns nil when no value could be converted.
func toTimeField(field *data.Field) *data.Field {
	found := false
	count := field.Len()

	timeField := data.NewFieldFromFieldType(data.FieldTypeNullableTime, count)
	timeField.Config = field.Config
	timeField.Name = field.Name
	timeField.Labels = field.Labels

	ft := field.Type()
	if ft.Numeric() {
		for i := 0; i < count; i++ {
			v, err := field.FloatAt(i)
			if err == nil {
				t := time.Unix(0, int64(v)*int64(time.Millisecond))
				timeField.SetConcrete(i, t.UTC())
				found = true
			}
		}
		if !found {
			return nil
		}
		return timeField
	}

	if ft == data.FieldTypeNullableString || ft == data.FieldTypeString {
		for i := 0; i < count; i++ {
			v, ok := field.ConcreteAt(i)
			if ok && v != nil {
				t, err := time.Parse(time.RFC3339, v.(string))
				if err == nil {
					timeField.SetConcrete(i, t.UTC())
					found = true
				}
			}
		}
		if !found {
			return nil
		}
		return timeField
	}

	return nil
}
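
A minimal usage sketch, assuming only the exported LoadCsvContent function above and the imports already present in this package; the example function and its input values are hypothetical, test-style code rather than part of the original file:

func ExampleLoadCsvContent() {
	content := "time,value\n1635760413000,3.14\n1635760414000,null"

	frame, err := LoadCsvContent(strings.NewReader(content), "example")
	if err != nil {
		panic(err)
	}

	// The "time" column is inferred as int64 and then converted to a time
	// field (epoch milliseconds); the "value" column falls through to float64.
	fmt.Println(frame.Rows(), len(frame.Fields))
	// Output: 2 2
}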