Compare commits
45 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5f9e0ace7e | ||
|
|
1c9491ddce | ||
|
|
1b55d09581 | ||
|
|
2b60228f42 | ||
|
|
e4fd9da88e | ||
|
|
d5ff95cc42 | ||
|
|
2d50e66b34 | ||
|
|
8135ff40d6 | ||
|
|
b06e795c27 | ||
|
|
4df843af2c | ||
|
|
fe84e221a1 | ||
|
|
8c1106e4d0 | ||
|
|
9879af40c1 | ||
|
|
3cb04ea811 | ||
|
|
5cbf4fa459 | ||
|
|
f23efb1bbd | ||
|
|
d1031e7a28 | ||
|
|
710103dcd3 | ||
|
|
65e55395e5 | ||
|
|
d27e28c3ff | ||
|
|
591e0760ab | ||
|
|
8f2373964a | ||
|
|
b2d39a1791 | ||
|
|
59df6f707b | ||
|
|
9043a10dfb | ||
|
|
f314f82791 | ||
|
|
cb97b1bc7b | ||
|
|
a0fdec0209 | ||
|
|
d3f81e5f5f | ||
|
|
6d97b7998b | ||
|
|
58261a814a | ||
|
|
bbe9c1bd2a | ||
|
|
d1c4560d6e | ||
|
|
e5520833d2 | ||
|
|
56e30b8aff | ||
|
|
623cd8f41c | ||
|
|
3b8707ddb4 | ||
|
|
5f3fef7789 | ||
|
|
990cac09b9 | ||
|
|
13acd134c8 | ||
|
|
18426f19d6 | ||
|
|
40831fa5a1 | ||
|
|
9a39ce480a | ||
|
|
d2012e4623 | ||
|
|
98bd3e89b2 |
@@ -116,7 +116,7 @@ exports[`no enzyme tests`] = {
|
||||
"packages/jaeger-ui-components/src/TraceTimelineViewer/SpanDetail/AccordianText.test.js:1966455998": [
|
||||
[14, 17, 13, "RegExp match", "2409514259"]
|
||||
],
|
||||
"packages/jaeger-ui-components/src/TraceTimelineViewer/SpanDetail/KeyValuesTable.test.js:3813002651": [
|
||||
"packages/jaeger-ui-components/src/TraceTimelineViewer/SpanDetail/KeyValuesTable.test.js:3568627238": [
|
||||
[14, 19, 13, "RegExp match", "2409514259"]
|
||||
],
|
||||
"packages/jaeger-ui-components/src/TraceTimelineViewer/SpanDetail/TextList.test.js:3006381933": [
|
||||
|
||||
@@ -21,7 +21,7 @@ DRONE := $(GOBIN)/drone-v1.4.0
|
||||
$(DRONE): $(BINGO_DIR)/drone.mod
|
||||
@# Install binary/ries using Go 1.14+ build command. This is using bwplotka/bingo-controlled, separate go module with pinned dependencies.
|
||||
@echo "(re)installing $(GOBIN)/drone-v1.4.0"
|
||||
@cd $(BINGO_DIR) && $(GO) build -mod=mod -modfile=drone.mod -o=$(GOBIN)/drone-v1.4.0 "github.com/drone/drone-cli/drone"
|
||||
@cd $(BINGO_DIR) && CGO_ENABLED=0 $(GO) build -mod=mod -modfile=drone.mod -o=$(GOBIN)/drone-v1.4.0 "github.com/drone/drone-cli/drone"
|
||||
|
||||
WIRE := $(GOBIN)/wire-v0.5.0
|
||||
$(WIRE): $(BINGO_DIR)/wire.mod
|
||||
|
||||
785
.drone.yml
785
.drone.yml
File diff suppressed because it is too large
Load Diff
11
.github/CODEOWNERS
vendored
11
.github/CODEOWNERS
vendored
@@ -25,9 +25,14 @@ go.sum @grafana/backend-platform
|
||||
/.bingo @grafana/backend-platform
|
||||
|
||||
# Continuous Integration
|
||||
.drone.yml @grafana/grafana-release-eng
|
||||
.drone.star @grafana/grafana-release-eng
|
||||
/scripts/drone/ @grafana/grafana-release-eng
|
||||
.drone.yml @grafana/grafana-delivery
|
||||
.drone.star @grafana/grafana-delivery
|
||||
/scripts/drone/ @grafana/grafana-delivery
|
||||
/pkg/build/ @grafana/grafana-delivery
|
||||
/.dockerignore @grafana/grafana-delivery
|
||||
/Dockerfile @grafana/grafana-delivery
|
||||
/Makefile @grafana/grafana-delivery
|
||||
/scripts/build/ @grafana/grafana-delivery
|
||||
|
||||
# Cloud Datasources backend code
|
||||
/pkg/tsdb/cloudwatch @grafana/cloud-datasources
|
||||
|
||||
34
.github/workflows/detect-breaking-changes-build-skip.yml
vendored
Normal file
34
.github/workflows/detect-breaking-changes-build-skip.yml
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
# Workflow for skipping the Levitate detection
|
||||
# (This is needed because workflows that are skipped due to path filtering will show up as pending in Github.
|
||||
# As this has the same name as the one in detect-breaking-changes-build.yml it will take over in these cases and succeed quickly.)
|
||||
|
||||
name: Levitate / Detect breaking changes
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- "packages/**"
|
||||
branches:
|
||||
- 'main'
|
||||
|
||||
jobs:
|
||||
detect:
|
||||
name: Detect breaking changes
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Skipping
|
||||
run: echo "No modifications in the public API (packages/), skipping."
|
||||
|
||||
# Build and persist output as a JSON (we need to tell the report workflow that the check has been skipped)
|
||||
- name: Persisting the check output
|
||||
run: |
|
||||
mkdir -p ./levitate
|
||||
echo "{ \"shouldSkip\": true }" > ./levitate/result.json
|
||||
|
||||
# Upload artifact (so it can be used in the more privileged "report" workflow)
|
||||
- name: Upload check output as artifact
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: levitate
|
||||
path: levitate/
|
||||
@@ -1,6 +1,11 @@
|
||||
name: Levitate / Detect breaking changes
|
||||
|
||||
on: pull_request
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- 'packages/**'
|
||||
branches:
|
||||
- 'main'
|
||||
|
||||
jobs:
|
||||
buildPR:
|
||||
|
||||
@@ -16,7 +16,11 @@ jobs:
|
||||
uses: "actions/checkout@v3"
|
||||
|
||||
- name: "Clone website-sync Action"
|
||||
run: "git clone --single-branch --no-tags --depth 1 -b master https://grafanabot:${{ secrets.GH_BOT_ACCESS_TOKEN }}@github.com/grafana/website-sync ./.github/actions/website-sync"
|
||||
# WEBSITE_SYNC_TOKEN is a fine-grained GitHub Personal Access Token that expires.
|
||||
# It must be regenerated in the grafanabot GitHub account and requires a Grafana organization
|
||||
# GitHub administrator to update the organization secret.
|
||||
# The IT helpdesk can update the organization secret.
|
||||
run: "git clone --single-branch --no-tags --depth 1 -b master https://grafanabot:${{ secrets.WEBSITE_SYNC_TOKEN }}@github.com/grafana/website-sync ./.github/actions/website-sync"
|
||||
|
||||
- name: "Publish to website repository (next)"
|
||||
uses: "./.github/actions/website-sync"
|
||||
@@ -25,6 +29,10 @@ jobs:
|
||||
repository: "grafana/website"
|
||||
branch: "master"
|
||||
host: "github.com"
|
||||
github_pat: "${{ secrets.GH_BOT_ACCESS_TOKEN }}"
|
||||
# PUBLISH_TO_WEBSITE_TOKEN is a fine-grained GitHub Personal Access Token that expires.
|
||||
# It must be regenerated in the grafanabot GitHub account and requires a Grafana organization
|
||||
# GitHub administrator to update the organization secret.
|
||||
# The IT helpdesk can update the organization secret.
|
||||
github_pat: "grafanabot:${{ secrets.PUBLISH_TO_WEBSITE_TOKEN }}"
|
||||
source_folder: "docs/sources"
|
||||
target_folder: "content/docs/grafana/next"
|
||||
|
||||
@@ -36,6 +36,27 @@ jobs:
|
||||
release_tag_regexp: "^v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)$"
|
||||
release_branch_regexp: "^v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.x$"
|
||||
|
||||
- name: "Generate packages_api docs"
|
||||
uses: "actions/setup-node@v3.2.0"
|
||||
id: "generate-packages_api-docs"
|
||||
with:
|
||||
node-version: '16'
|
||||
|
||||
- name: "Get yarn cache directory path"
|
||||
id: "yarn-cache-dir-path"
|
||||
run: "echo ::set-output name=dir::$(yarn config get cacheFolder)"
|
||||
|
||||
- uses: "actions/cache@v2.1.7"
|
||||
with:
|
||||
path: "${{ steps.yarn-cache-dir-path.outputs.dir }}"
|
||||
key: "yarn-${{ hashFiles('**/yarn.lock') }}"
|
||||
restore-keys: |
|
||||
yarn-
|
||||
|
||||
- run: "yarn install --immutable"
|
||||
|
||||
- run: "./scripts/ci-reference-docs-build.sh"
|
||||
|
||||
- name: "Determine technical documentation version"
|
||||
if: "steps.has-matching-release-tag.outputs.bool == 'true'"
|
||||
uses: "./actions/docs-target"
|
||||
@@ -45,7 +66,11 @@ jobs:
|
||||
|
||||
- name: "Clone website-sync Action"
|
||||
if: "steps.has-matching-release-tag.outputs.bool == 'true'"
|
||||
run: "git clone --single-branch --no-tags --depth 1 -b master https://grafanabot:${{ secrets.GH_BOT_ACCESS_TOKEN }}@github.com/grafana/website-sync ./.github/actions/website-sync"
|
||||
# WEBSITE_SYNC_TOKEN is a fine-grained GitHub Personal Access Token that expires.
|
||||
# It must be regenerated in the grafanabot GitHub account and requires a Grafana organization
|
||||
# GitHub administrator to update the organization secret.
|
||||
# The IT helpdesk can update the organization secret.
|
||||
run: "git clone --single-branch --no-tags --depth 1 -b master https://grafanabot:${{ secrets.WEBSITE_SYNC_TOKEN }}@github.com/grafana/website-sync ./.github/actions/website-sync"
|
||||
|
||||
- name: "Publish to website repository (release)"
|
||||
if: "steps.has-matching-release-tag.outputs.bool == 'true'"
|
||||
@@ -55,6 +80,10 @@ jobs:
|
||||
repository: "grafana/website"
|
||||
branch: "master"
|
||||
host: "github.com"
|
||||
github_pat: "${{ secrets.GH_BOT_ACCESS_TOKEN }}"
|
||||
# PUBLISH_TO_WEBSITE_TOKEN is a fine-grained GitHub Personal Access Token that expires.
|
||||
# It must be regenerated in the grafanabot GitHub account and requires a Grafana organization
|
||||
# GitHub administrator to update the organization secret.
|
||||
# The IT helpdesk can update the organization secret.
|
||||
github_pat: "grafanabot:${{ secrets.PUBLISH_TO_WEBSITE_TOKEN }}"
|
||||
source_folder: "docs/sources"
|
||||
target_folder: "content/docs/grafana/${{ steps.target.outputs.target }}"
|
||||
|
||||
22
CHANGELOG.md
22
CHANGELOG.md
@@ -1,3 +1,25 @@
|
||||
<!-- 8.5.22 START -->
|
||||
|
||||
# 8.5.22 (2023-03-22)
|
||||
|
||||
<!-- 8.5.22 END -->
|
||||
|
||||
<!-- 8.5.21 START -->
|
||||
|
||||
# 8.5.21 (2023-02-28)
|
||||
|
||||
<!-- 8.5.21 END -->
|
||||
|
||||
<!-- 8.5.20 START -->
|
||||
|
||||
# 8.5.20 (2023-01-25)
|
||||
|
||||
### Features and enhancements
|
||||
|
||||
- **Chore:** Upgrade Go to 1.19.4 [v8.5.x]. [#60824](https://github.com/grafana/grafana/pull/60824), [@sakjur](https://github.com/sakjur)
|
||||
|
||||
<!-- 8.5.20 END -->
|
||||
|
||||
<!-- 8.5.15 START -->
|
||||
|
||||
# 8.5.15 (2022-11-08)
|
||||
|
||||
@@ -20,7 +20,7 @@ COPY emails emails
|
||||
ENV NODE_ENV production
|
||||
RUN yarn build
|
||||
|
||||
FROM golang:1.19.4-alpine3.15 as go-builder
|
||||
FROM golang:1.19.9-alpine3.17 as go-builder
|
||||
|
||||
RUN apk add --no-cache gcc g++ make
|
||||
|
||||
@@ -40,7 +40,7 @@ RUN go mod verify
|
||||
RUN make build-go
|
||||
|
||||
# Final stage
|
||||
FROM alpine:3.15
|
||||
FROM alpine:3.17
|
||||
|
||||
LABEL maintainer="Grafana team <hello@grafana.com>"
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ COPY emails emails
|
||||
ENV NODE_ENV production
|
||||
RUN yarn build
|
||||
|
||||
FROM golang:1.19.4 AS go-builder
|
||||
FROM golang:1.19.8 AS go-builder
|
||||
|
||||
WORKDIR /src/grafana
|
||||
|
||||
|
||||
@@ -1,27 +1,28 @@
|
||||
.PHONY: pull docs docs-quick docs-no-pull docs-test docs-local-static
|
||||
|
||||
IMAGE = grafana/grafana-docs-dev:latest
|
||||
CONTENT_PATH = /hugo/content/docs/grafana/next
|
||||
PODMAN = $(shell if command -v podman >/dev/null 2>&1; then echo podman; else echo docker; fi)
|
||||
IMAGE = grafana/docs-base:latest
|
||||
CONTENT_PATH = /hugo/content/docs/grafana/latest
|
||||
LOCAL_STATIC_PATH = ../../website/static
|
||||
PORT = 3002:3002
|
||||
|
||||
pull:
|
||||
docker pull $(IMAGE)
|
||||
$(PODMAN) pull $(IMAGE)
|
||||
|
||||
docs: pull
|
||||
docker run -v $(shell pwd)/sources:$(CONTENT_PATH):Z -p $(PORT) --rm -it $(IMAGE) /bin/bash -c "make server"
|
||||
$(PODMAN) run --init -v $(shell pwd)/sources:$(CONTENT_PATH):Z -p $(PORT) --rm -it $(IMAGE) make server
|
||||
|
||||
docs-quick: pull
|
||||
docker run -v $(shell pwd)/sources:$(CONTENT_PATH):Z -p $(PORT) --rm -it $(IMAGE) /bin/bash -c "ln -s /frontend-docs/packages_api /hugo/content/docs/grafana/next/packages_api && make server-quick"
|
||||
docs-preview: pull
|
||||
$(PODMAN) run --init -v $(shell pwd)/sources:$(CONTENT_PATH):Z -p $(PORT) --rm -it $(IMAGE) make server BUILD_DRAFTS=true
|
||||
|
||||
docs-no-pull:
|
||||
docker run -v $(shell pwd)/sources:$(CONTENT_PATH):Z -p $(PORT) --rm -it $(IMAGE) /bin/bash -c "make server"
|
||||
$(PODMAN) run --init -v $(shell pwd)/sources:$(CONTENT_PATH):Z -p $(PORT) --rm -it $(IMAGE) make server
|
||||
|
||||
docs-test: pull
|
||||
docker run -v $(shell pwd)/sources:$(CONTENT_PATH):Z --rm -it $(IMAGE) /bin/bash -c 'make prod'
|
||||
$(PODMAN) run --init -v $(shell pwd)/sources:$(CONTENT_PATH):Z --rm -it $(IMAGE) make prod
|
||||
|
||||
# expects that you have grafana/website checked out in same path as the grafana repo.
|
||||
docs-local-static: pull
|
||||
if [ ! -d "$(LOCAL_STATIC_PATH)" ]; then echo "local path (website project) $(LOCAL_STATIC_PATH) not found"]; exit 1; fi
|
||||
docker run -v $(shell pwd)/sources:$(CONTENT_PATH):Z \
|
||||
$(PODMAN) run --init -v $(shell pwd)/sources:$(CONTENT_PATH):Z \
|
||||
-v $(shell pwd)/$(LOCAL_STATIC_PATH):/hugo/static:Z -p $(PORT) --rm -it $(IMAGE)
|
||||
|
||||
@@ -9,7 +9,7 @@ weight: 113
|
||||
|
||||
Grafana 8.0 has new and improved alerting that centralizes alerting information in a single, searchable view. It is enabled by default for all new OSS instances, and is an [opt-in]({{< relref "./opt-in.md" >}}) feature for older installations that still use legacy dashboard alerting. We encourage you to create issues in the Grafana GitHub repository for bugs found while testing Grafana alerting. See also, [What's New with Grafana alerting]({{< relref "./difference-old-new.md" >}}).
|
||||
|
||||
> Refer to [Fine-grained access control]({{< relref "../enterprise/access-control/_index.md" >}}) in Grafana Enterprise to learn more about controlling access to alerts using fine-grained permissions.
|
||||
> Refer to [Fine-grained access control]({{< relref "../../enterprise/access-control" >}}) in Grafana Enterprise to learn more about controlling access to alerts using fine-grained permissions.
|
||||
|
||||
When Grafana alerting is enabled, you can:
|
||||
|
||||
|
||||
@@ -55,3 +55,6 @@ You may allow users to sign-up via Google authentication by setting the
|
||||
`allow_sign_up` option to `true`. When this option is set to `true`, any
|
||||
user successfully authenticating via Google authentication will be
|
||||
automatically signed up.
|
||||
|
||||
You may specify a domain to be passed as `hd` query parameter accepted by Google's
|
||||
OAuth 2.0 authentication API. Refer to Google's OAuth [documentation](https://developers.google.com/identity/openid-connect/openid-connect#hd-param).
|
||||
|
||||
@@ -14,7 +14,7 @@ Dashboard snapshots are static . Queries and expressions cannot be re-executed f
|
||||
Before you begin, ensure that you have configured a data source. See also:
|
||||
|
||||
- [Working with Grafana dashboard UI]({{< relref "./dashboard-ui/_index.md" >}})
|
||||
- [Dashboard folders]({{< relref "./dashboard-folders.md" >}})
|
||||
- [Dashboard folders]({{< relref "./dashboard_folders" >}})
|
||||
- [Create dashboard]({{< relref "./dashboard-create" >}})
|
||||
- [Manage dashboards]({{< relref "./dashboard-manage.md" >}})
|
||||
- [Annotations]({{< relref "./annotations.md" >}})
|
||||
@@ -23,7 +23,7 @@ Before you begin, ensure that you have configured a data source. See also:
|
||||
- [Keyboard shortcuts]({{< relref "./shortcuts.md" >}})
|
||||
- [Reporting]({{< relref "./reporting.md" >}})
|
||||
- [Time range controls]({{< relref "./time-range-controls.md" >}})
|
||||
- [Dashboard version history]({{< relref "./dashboard-history.md" >}})
|
||||
- [Dashboard version history]({{< relref "./dashboard_history" >}})
|
||||
- [Dashboard export and import]({{< relref "./export-import.md" >}})
|
||||
- [Dashboard JSON model]({{< relref "./json-model.md" >}})
|
||||
- [Scripted dashboards]({{< relref "./scripted-dashboards.md" >}})
|
||||
|
||||
@@ -9,7 +9,6 @@ Here you can find detailed release notes that list everything that is included i
|
||||
about deprecations, breaking changes as well as changes that relate to plugin development.
|
||||
|
||||
- [Release notes for 8.5.13]({{< relref "release-notes-8-5-13" >}})
|
||||
- [Release notes for 8.5.11]({{< relref "release-notes-8-5-11" >}})
|
||||
- [Release notes for 8.5.10]({{< relref "release-notes-8-5-10" >}})
|
||||
- [Release notes for 8.5.9]({{< relref "release-notes-8-5-9" >}})
|
||||
- [Release notes for 8.5.6]({{< relref "release-notes-8-5-6" >}})
|
||||
@@ -20,7 +19,6 @@ about deprecations, breaking changes as well as changes that relate to plugin de
|
||||
- [Release notes for 8.5.1]({{< relref "release-notes-8-5-1" >}})
|
||||
- [Release notes for 8.5.0]({{< relref "release-notes-8-5-0" >}})
|
||||
- [Release notes for 8.5.0-beta1]({{< relref "release-notes-8-5-0-beta1" >}})
|
||||
- [Release notes for 8.4.10]({{< relref "release-notes-8-4-10" >}})
|
||||
- [Release notes for 8.4.7]({{< relref "release-notes-8-4-7" >}})
|
||||
- [Release notes for 8.4.6]({{< relref "release-notes-8-4-6" >}})
|
||||
- [Release notes for 8.4.5]({{< relref "release-notes-8-4-5" >}})
|
||||
|
||||
@@ -14,7 +14,7 @@ weight: 200
|
||||
|
||||
This page contains links to dashboards in Grafana Play with examples of template variables.
|
||||
|
||||
- [Elasticsearch Metrics](https://play.grafana.org/d/000000014/elasticsearch-metrics?orgId=1) - Uses ad hoc filters, global variables, and a custom variable.
|
||||
- [Elasticsearch Metrics](https://play.grafana.org/d/z8OZC66nk/elasticsearch-8-2-0-sample-flight-data?orgId=1) - Uses ad hoc filters, global variables, and a custom variable.
|
||||
- [Graphite Templated Nested](https://play.grafana.org/d/000000056/graphite-templated-nested?orgId=1) - Uses query variables, chained query variables, an interval variable, and a repeated panel.
|
||||
- [Influx DB Group By Variable](https://play.grafana.org/d/000000137/influxdb-group-by-variable?orgId=1) - Query variable, panel uses the variable results to group the metric data.
|
||||
- [InfluxDB Raw Query Template Var](https://play.grafana.org/d/000000083/influxdb-raw-query-template-var?orgId=1) - Uses query variables, chained query variables, and an interval variable.
|
||||
|
||||
14
go.mod
14
go.mod
@@ -27,7 +27,7 @@ require (
|
||||
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
|
||||
github.com/centrifugal/centrifuge v0.19.0
|
||||
github.com/cortexproject/cortex v1.10.1-0.20211014125347-85c378182d0d
|
||||
github.com/crewjam/saml v0.4.9
|
||||
github.com/crewjam/saml v0.4.13
|
||||
github.com/davecgh/go-spew v1.1.1
|
||||
github.com/denisenkom/go-mssqldb v0.11.0
|
||||
github.com/dop251/goja v0.0.0-20210804101310-32956a348b49
|
||||
@@ -45,7 +45,7 @@ require (
|
||||
github.com/gogo/protobuf v1.3.2
|
||||
github.com/golang/mock v1.6.0
|
||||
github.com/golang/snappy v0.0.4
|
||||
github.com/google/go-cmp v0.5.8
|
||||
github.com/google/go-cmp v0.5.9
|
||||
github.com/google/uuid v1.3.0
|
||||
github.com/google/wire v0.5.0
|
||||
github.com/gorilla/websocket v1.4.2
|
||||
@@ -85,8 +85,8 @@ require (
|
||||
github.com/prometheus/prometheus v1.8.2-0.20211011171444-354d8d2ecfac
|
||||
github.com/robfig/cron v0.0.0-20180505203441-b41be1df6967
|
||||
github.com/robfig/cron/v3 v3.0.1
|
||||
github.com/russellhaering/goxmldsig v1.1.1
|
||||
github.com/stretchr/testify v1.7.0
|
||||
github.com/russellhaering/goxmldsig v1.2.0
|
||||
github.com/stretchr/testify v1.8.1
|
||||
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf
|
||||
github.com/ua-parser/uap-go v0.0.0-20211112212520-00c877edfe0f
|
||||
github.com/uber/jaeger-client-go v2.29.1+incompatible
|
||||
@@ -174,7 +174,7 @@ require (
|
||||
github.com/go-openapi/validate v0.20.2 // indirect
|
||||
github.com/gogo/googleapis v1.4.1 // indirect
|
||||
github.com/gogo/status v1.1.0 // indirect
|
||||
github.com/golang-jwt/jwt/v4 v4.4.2 // indirect
|
||||
github.com/golang-jwt/jwt/v4 v4.4.3 // indirect
|
||||
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe // indirect
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b // indirect
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
|
||||
@@ -233,7 +233,7 @@ require (
|
||||
github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect
|
||||
github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546 // indirect
|
||||
github.com/sirupsen/logrus v1.8.1 // indirect
|
||||
github.com/stretchr/objx v0.2.0 // indirect
|
||||
github.com/stretchr/objx v0.5.0 // indirect
|
||||
github.com/uber/jaeger-lib v2.4.1+incompatible // indirect
|
||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||
github.com/weaveworks/promrus v1.2.0 // indirect
|
||||
@@ -284,7 +284,7 @@ require (
|
||||
)
|
||||
|
||||
// Use fork of crewjam/saml with fixes for some issues until changes get merged into upstream
|
||||
replace github.com/crewjam/saml => github.com/grafana/saml v0.4.9-0.20221202084623-9d456850a65a
|
||||
replace github.com/crewjam/saml => github.com/grafana/saml v0.4.13-0.20230331080031-67cbfa09c7b6
|
||||
|
||||
replace github.com/apache/thrift => github.com/apache/thrift v0.14.1
|
||||
|
||||
|
||||
67
go.sum
67
go.sum
@@ -76,6 +76,7 @@ cuelang.org/go v0.4.0/go.mod h1:tz/edkPi+T37AZcb5GlPY+WJkL6KiDlDVupKwL3vvjs=
|
||||
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
|
||||
dmitri.shuralyov.com/gpu/mtl v0.0.0-20201218220906-28db891af037/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
|
||||
gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8=
|
||||
git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc=
|
||||
github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8=
|
||||
github.com/Azure/azure-amqp-common-go/v3 v3.0.0/go.mod h1:SY08giD/XbhTz07tJdpw1SoxQXHPN30+DI3Z04SYqyg=
|
||||
github.com/Azure/azure-amqp-common-go/v3 v3.2.1/go.mod h1:O6X1iYHP7s2x7NjUKsXVhkwWrQhxrd+d8/3rRadj4CI=
|
||||
@@ -286,7 +287,10 @@ github.com/aerospike/aerospike-client-go v1.27.0/go.mod h1:zj8LBEnWBDOVEIJt8LvaR
|
||||
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
|
||||
github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
|
||||
github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
|
||||
github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY=
|
||||
github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk=
|
||||
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
|
||||
github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM=
|
||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
||||
@@ -440,6 +444,7 @@ github.com/bmizerany/pat v0.0.0-20170815010413-6226ea591a40/go.mod h1:8rLXio+Wji
|
||||
github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps=
|
||||
github.com/bonitoo-io/go-sql-bigquery v0.3.4-1.4.0/go.mod h1:J4Y6YJm0qTWB9aFziB7cPeSyc6dOZFyJdteSeybVpXQ=
|
||||
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
|
||||
github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
|
||||
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b h1:L/QXpzIa3pOvUGt1D1lA5KjYhPBAN/3iWdP7xeFS9F0=
|
||||
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
|
||||
github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk=
|
||||
@@ -460,6 +465,7 @@ github.com/casbin/casbin/v2 v2.31.6/go.mod h1:vByNa/Fchek0KZUgG5wEsl7iFsiviAYKRt
|
||||
github.com/cenkalti/backoff v0.0.0-20181003080854-62661b46c409/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
|
||||
github.com/cenkalti/backoff v1.0.0/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
|
||||
github.com/cenkalti/backoff v2.0.0+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
|
||||
github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4=
|
||||
github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
|
||||
github.com/cenkalti/backoff/v4 v4.0.2/go.mod h1:eEew/i+1Q6OrCDZh3WiXYv3+nJwBASZ8Bog/87DQnVg=
|
||||
github.com/cenkalti/backoff/v4 v4.1.0/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw=
|
||||
@@ -474,6 +480,7 @@ github.com/centrifugal/protocol v0.7.6 h1:AfMwTZfwnFwZslIzQL4QtRnWSVO32RPSuk4iNS
|
||||
github.com/centrifugal/protocol v0.7.6/go.mod h1:cJo0/BuXglhPfg0fgSgTXvBZ7y+9rdg4+nPbIDOVmlA=
|
||||
github.com/certifi/gocertifi v0.0.0-20191021191039-0944d244cd40/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA=
|
||||
github.com/cespare/xxhash v0.0.0-20181017004759-096ff4a8a059/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
|
||||
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
|
||||
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
|
||||
github.com/cespare/xxhash/v2 v2.1.0/go.mod h1:dgIUBU3pDso/gPgZ1osOZ0iQf77oPR28Tjxl5dIMyVM=
|
||||
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
@@ -514,6 +521,7 @@ github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWH
|
||||
github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
|
||||
github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1 h1:zH8ljVhhq7yC0MIeUL/IviMtY8hx2mK8cN9wEYb8ggw=
|
||||
github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
|
||||
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
|
||||
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
|
||||
github.com/cockroachdb/apd/v2 v2.0.1/go.mod h1:DDxRlzC2lo3/vSlmSoS7JkqbbrARPuFOGr0B9pvN3Gw=
|
||||
github.com/cockroachdb/apd/v2 v2.0.2 h1:weh8u7Cneje73dDh+2tEVLUvyBc89iwepWCD8b8034E=
|
||||
@@ -651,6 +659,7 @@ github.com/cortexproject/cortex v1.10.1-0.20211014125347-85c378182d0d/go.mod h1:
|
||||
github.com/couchbase/go-couchbase v0.0.0-20180501122049-16db1f1fe037/go.mod h1:TWI8EKQMs5u5jLKW/tsb9VwauIrMIxQG1r5fMsswK5U=
|
||||
github.com/couchbase/gomemcached v0.0.0-20180502221210-0da75df14530/go.mod h1:srVSlQLB8iXBVXHgnqemxUXqN6FCvClgCMPCsjBDR7c=
|
||||
github.com/couchbase/goutils v0.0.0-20180530154633-e865a1461c8a/go.mod h1:BQwMFlJzDjFDG3DJUdU0KORxn88UlsOULuxLExMh3Hs=
|
||||
github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
|
||||
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=
|
||||
@@ -683,6 +692,7 @@ github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dchest/uniuri v1.2.0/go.mod h1:fSzm4SLHzNZvWLvWJew423PhAzkpNQYq+uNLq4kxhkY=
|
||||
github.com/deepmap/oapi-codegen v1.6.0/go.mod h1:ryDa9AgbELGeB+YEXE1dR53yAjHwFvE9iAUlWl9Al3M=
|
||||
github.com/deepmap/oapi-codegen v1.8.2 h1:SegyeYGcdi0jLLrpbCMoJxnUUn8GBXHsvr4rbzjuhfU=
|
||||
github.com/deepmap/oapi-codegen v1.8.2/go.mod h1:YLgSKSDv/bZQB7N4ws6luhozi3cEdRktEqrX88CvjIw=
|
||||
@@ -788,6 +798,7 @@ github.com/elazarl/goproxy v0.0.0-20220115173737-adb46da277ac h1:XDAn206aIqKPdF5
|
||||
github.com/elazarl/goproxy v0.0.0-20220115173737-adb46da277ac/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM=
|
||||
github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2/go.mod h1:gNh8nYJoAm43RfaxurUnxr+N1PwuFV3ZMl/efxlIlY8=
|
||||
github.com/elazarl/goproxy/ext v0.0.0-20220115173737-adb46da277ac h1:9yrT5tmn9Zc0ytWPASlaPwQfQMQYnRf0RSDe1XvHw0Q=
|
||||
github.com/elazarl/goproxy/ext v0.0.0-20220115173737-adb46da277ac/go.mod h1:gNh8nYJoAm43RfaxurUnxr+N1PwuFV3ZMl/efxlIlY8=
|
||||
github.com/ema/qdisc v0.0.0-20190904071900-b82c76788043/go.mod h1:ix4kG2zvdUd8kEKSW0ZTr1XLks0epFpI4j745DXxlNE=
|
||||
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
|
||||
github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
|
||||
@@ -856,6 +867,7 @@ github.com/gdamore/encoding v1.0.0/go.mod h1:alR0ol34c49FCSBLjhosxzcPHQbf2trDkoo
|
||||
github.com/gdamore/tcell v1.3.0/go.mod h1:Hjvr+Ofd+gLglo7RYKxxnzCBmev3BzsS67MebKS4zMM=
|
||||
github.com/getkin/kin-openapi v0.53.0/go.mod h1:7Yn5whZr5kJi6t+kShccXS8ae1APpYTW6yheSwk8Yi4=
|
||||
github.com/getkin/kin-openapi v0.61.0/go.mod h1:7Yn5whZr5kJi6t+kShccXS8ae1APpYTW6yheSwk8Yi4=
|
||||
github.com/getkin/kin-openapi v0.91.0/go.mod h1:660oXbgy5JFMKreazJaQTw7o+X00qeSyhcnluiMv+Xg=
|
||||
github.com/getkin/kin-openapi v0.94.0 h1:bAxg2vxgnHHHoeefVdmGbR+oxtJlcv5HsJJa3qmAHuo=
|
||||
github.com/getkin/kin-openapi v0.94.0/go.mod h1:LWZfzOd7PRy8GJ1dJ6mCU6tNdSfOwRac1BUPam4aw6Q=
|
||||
github.com/getsentry/raven-go v0.2.0/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ=
|
||||
@@ -885,6 +897,7 @@ github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm
|
||||
github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g=
|
||||
github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks=
|
||||
github.com/go-fonts/liberation v0.1.1/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY=
|
||||
github.com/go-fonts/liberation v0.2.0/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY=
|
||||
github.com/go-fonts/stix v0.1.0/go.mod h1:w/c1f0ldAUlJmLBvlbkvVXLAD+tAMqobIIQpmnUIzUY=
|
||||
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
@@ -898,6 +911,7 @@ github.com/go-kit/kit v0.11.0/go.mod h1:73/6Ixaufkvb5Osvkls8C79vuQ49Ba1rUEUYNSf+
|
||||
github.com/go-kit/log v0.1.0 h1:DGJh0Sm43HbOeYDNnVZFl8BvcYVvjD5bqYJvp0REbwQ=
|
||||
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
|
||||
github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U=
|
||||
github.com/go-latex/latex v0.0.0-20210823091927-c0d11ff05a81/go.mod h1:SX0U8uGpxhq9o2S/CELCSUxEWWAuoCUcVCQWv7G2OCk=
|
||||
github.com/go-ldap/ldap v3.0.2+incompatible/go.mod h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp40uXYvFoEVrNEPqRc=
|
||||
github.com/go-ldap/ldap/v3 v3.1.3/go.mod h1:3rbOH3jRS2u6jg2rJnKAMLE/xQyCKIveG2Sa/Cohzb8=
|
||||
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
|
||||
@@ -1037,6 +1051,8 @@ github.com/go-openapi/validate v0.19.15/go.mod h1:tbn/fdOwYHgrhPBzidZfJC2MIVvs9G
|
||||
github.com/go-openapi/validate v0.20.1/go.mod h1:b60iJT+xNNLfaQJUqLI7946tYiFEOuE9E4k54HpKcJ0=
|
||||
github.com/go-openapi/validate v0.20.2 h1:AhqDegYV3J3iQkMPJSXkvzymHKMTw0BST3RK3hTT4ts=
|
||||
github.com/go-openapi/validate v0.20.2/go.mod h1:e7OJoKNgd0twXZwIn0A43tHbvIcr/rZIVCbJBpTUoY0=
|
||||
github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M=
|
||||
github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M=
|
||||
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
|
||||
github.com/go-playground/locales v0.12.1/go.mod h1:IUMDtCfWo/w/mtMfIE/IG2K+Ey3ygWanZIBtBW0W2TM=
|
||||
github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
|
||||
@@ -1044,6 +1060,7 @@ github.com/go-playground/universal-translator v0.16.0/go.mod h1:1AnU7NaIRDWWzGEK
|
||||
github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
|
||||
github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI=
|
||||
github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
|
||||
github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
|
||||
github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
|
||||
github.com/go-redis/redis/v8 v8.0.0-beta.10.0.20200905143926-df7fe4e2ce72/go.mod h1:CJP1ZIHwhosNYwIdaHPZK9vHsM3+roNBaZ7U9Of1DXc=
|
||||
github.com/go-redis/redis/v8 v8.2.3/go.mod h1:ysgGY09J/QeDYbu3HikWEIPCwaeOkuNoTgKayTEaEOw=
|
||||
@@ -1137,8 +1154,8 @@ github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzq
|
||||
github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=
|
||||
github.com/golang-jwt/jwt/v4 v4.1.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=
|
||||
github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=
|
||||
github.com/golang-jwt/jwt/v4 v4.4.2 h1:rcc4lwaZgFMCZ5jxF9ABolDcIHdBytAFgqFPbSJQAYs=
|
||||
github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
|
||||
github.com/golang-jwt/jwt/v4 v4.4.3 h1:Hxl6lhQFj4AnOX6MLrsCb/+7tCj7DxP7VA+2rDIq5AU=
|
||||
github.com/golang-jwt/jwt/v4 v4.4.3/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
|
||||
github.com/golang-migrate/migrate/v4 v4.7.0 h1:gONcHxHApDTKXDyLH/H97gEHmpu1zcnnbAaq2zgrPrs=
|
||||
github.com/golang-migrate/migrate/v4 v4.7.0/go.mod h1:Qvut3N4xKWjoH3sokBccML6WyHSnggXm/DvMMnTsQIc=
|
||||
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY=
|
||||
@@ -1223,8 +1240,9 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
|
||||
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
|
||||
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
|
||||
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
|
||||
github.com/google/go-github/v32 v32.1.0/go.mod h1:rIEpZD9CTDQwDK9GDrtMTycQNA4JU3qBsCizh3q2WCI=
|
||||
github.com/google/go-querystring v0.0.0-20170111101155-53e6ce116135/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
|
||||
@@ -1281,6 +1299,7 @@ github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/wire v0.5.0 h1:I7ELFeVBr3yfPIcc8+MWvrjk+3VjbcSzoXm3JVa+jD8=
|
||||
github.com/google/wire v0.5.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU=
|
||||
github.com/googleapis/gax-go v2.0.2+incompatible h1:silFMLAnr330+NRuag/VjIGF7TLp/LBrV2CJKFLWEww=
|
||||
github.com/googleapis/gax-go v2.0.2+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
|
||||
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
|
||||
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
||||
@@ -1350,12 +1369,13 @@ github.com/grafana/grafana-google-sdk-go v0.0.0-20211104130251-b190293eaf58/go.m
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.94.0/go.mod h1:3VXz4nCv6wH5SfgB3mlW39s+c+LetqSCjFj7xxPC5+M=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.114.0/go.mod h1:D7x3ah+1d4phNXpbnOaxa/osSaZlwh9/ZUnGGzegRbk=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.125.0/go.mod h1:9YiJ5GUxIsIEUC0qR9+BJVP5M7mCSP6uc6Ne62YKkgc=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.129.0/go.mod h1:4edtosZepfQF9jkQwRywJsNSJzXTHmzbmcVcAl8MEQc=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.131.0 h1:8M+Qfch4WNi3PPpRhWtmcLFTCq8zlIjnxrc8iRigAY0=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.131.0/go.mod h1:jmrxelOJKrIK0yrsIzcotS8pbqPZozbmJgGy7k3hK1k=
|
||||
github.com/grafana/loki v1.6.2-0.20211015002020-7832783b1caa h1:+pXjAxavVR2FKKNsuuCXGCWEj8XGc1Af6SPiyBpzU2A=
|
||||
github.com/grafana/loki v1.6.2-0.20211015002020-7832783b1caa/go.mod h1:0O8o/juxNSKN/e+DzWDTRkl7Zm8CkZcz0NDqEdojlrk=
|
||||
github.com/grafana/saml v0.4.9-0.20221202084623-9d456850a65a h1:uNqaITkfBRaaM8mAy+dpuI+hMEdJbZOqM5PLbaUKiUo=
|
||||
github.com/grafana/saml v0.4.9-0.20221202084623-9d456850a65a/go.mod h1:9Zh6dWPtB3MSzTRt8fIFH60Z351QQ+s7hCU3J/tTlA4=
|
||||
github.com/grafana/saml v0.4.13-0.20230331080031-67cbfa09c7b6 h1:oHn/OOUkECNX06DPHksS7R3UY5Qdye04b/sBj2/OJ5E=
|
||||
github.com/grafana/saml v0.4.13-0.20230331080031-67cbfa09c7b6/go.mod h1:igEejV+fihTIlHXYP8zOec3V5A8y3lws5bQBFsTm4gA=
|
||||
github.com/grafana/sqlds/v2 v2.3.2/go.mod h1:34uyqPBWsEvg4V/xxh6V4uIqwu1qLfOfsmScll/ukrk=
|
||||
github.com/grafana/xorm v0.8.3-0.20220614223926-2fcda7565af6 h1:I9dh1MXGX0wGyxdV/Sl7+ugnki4Dfsy8lv2s5Yf887o=
|
||||
github.com/grafana/xorm v0.8.3-0.20220614223926-2fcda7565af6/go.mod h1:ZkJLEYLoVyg7amJK/5r779bHyzs2AU8f8VMiP6BM7uY=
|
||||
@@ -1698,8 +1718,9 @@ github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFB
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
|
||||
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/pty v1.0.0/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
|
||||
@@ -2062,9 +2083,11 @@ github.com/philhofer/fwd v1.1.1/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG
|
||||
github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY=
|
||||
github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
|
||||
github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
|
||||
github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
|
||||
github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc=
|
||||
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
||||
github.com/pierrec/lz4 v2.5.2+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
||||
github.com/pierrec/lz4 v2.6.0+incompatible h1:Ix9yFKn1nSPBLFl/yZknTp8TU5G4Ps0JDmguYK6iH1A=
|
||||
github.com/pierrec/lz4 v2.6.0+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
||||
github.com/pierrec/lz4/v4 v4.1.7/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||
github.com/pierrec/lz4/v4 v4.1.8 h1:ieHkV+i2BRzngO4Wd/3HGowuZStgq6QkPsD1eolNAO4=
|
||||
@@ -2241,20 +2264,23 @@ github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
|
||||
github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
|
||||
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
|
||||
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
|
||||
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
|
||||
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
|
||||
github.com/rs/cors v1.6.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
|
||||
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
|
||||
github.com/rs/cors v1.8.0 h1:P2KMzcFwrPoSjkF1WLRPsp3UMLyql8L4v9hQpVeK5so=
|
||||
github.com/rs/cors v1.8.0/go.mod h1:EBwu+T5AvHOcXwvZIkQFjUN6s8Czyqw12GL/Y0tUyRM=
|
||||
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
|
||||
github.com/rs/zerolog v1.4.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
|
||||
github.com/russellhaering/goxmldsig v1.1.1 h1:vI0r2osGF1A9PLvsGdPUAGwEIrKa4Pj5sesSBsebIxM=
|
||||
github.com/russellhaering/goxmldsig v1.1.1/go.mod h1:gM4MDENBQf7M+V824SGfyIUVFWydB7n0KkEubVJl+Tw=
|
||||
github.com/russellhaering/goxmldsig v1.2.0 h1:Y6GTTc9Un5hCxSzVz4UIWQ/zuVwDvzJk80guqzwx6Vg=
|
||||
github.com/russellhaering/goxmldsig v1.2.0/go.mod h1:gM4MDENBQf7M+V824SGfyIUVFWydB7n0KkEubVJl+Tw=
|
||||
github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
|
||||
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||
github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w=
|
||||
github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk=
|
||||
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
||||
github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
||||
github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc=
|
||||
@@ -2378,8 +2404,10 @@ github.com/streadway/handy v0.0.0-20200128134331-0f66f006fb2e/go.mod h1:qNTQ5P5J
|
||||
github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=
|
||||
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/testify v0.0.0-20151208002404-e3a8ff8ce365/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
@@ -2389,8 +2417,11 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
|
||||
github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
|
||||
github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
|
||||
@@ -2461,6 +2492,7 @@ github.com/unknwon/com v1.0.1/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnl
|
||||
github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
|
||||
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
|
||||
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
|
||||
github.com/urfave/cli v1.22.2 h1:gsqYFH8bb9ekPA12kRo0hfjngWQjkJPlN9R0N78BoUo=
|
||||
github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
|
||||
github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M=
|
||||
github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
|
||||
@@ -2536,11 +2568,13 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
|
||||
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yuin/gopher-lua v0.0.0-20180630135845-46796da1b0b4/go.mod h1:aEV29XrmTYFr3CiRxZeGHpkvbwq+prZduBqMaascyCU=
|
||||
github.com/yuin/gopher-lua v0.0.0-20200816102855-ee81675732da/go.mod h1:E1AXubJBdNmFERAOucpDIxNzeGfLzg0mYh+UfMWdChA=
|
||||
github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs=
|
||||
github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=
|
||||
github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=
|
||||
github.com/zenazn/goji v1.0.1/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
|
||||
github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs=
|
||||
github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0=
|
||||
gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b/go.mod h1:T3BPAOm2cqquPa0MKWeNkmOM5RQsRhkrwMWonFMN7fE=
|
||||
@@ -2745,6 +2779,10 @@ golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+o
|
||||
golang.org/x/image v0.0.0-20200618115811-c13761719519/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/image v0.0.0-20201208152932-35266b937fa6/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/image v0.0.0-20210216034530-4410531fe030/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/image v0.0.0-20210607152325-775e3b0c77b9/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
|
||||
golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
|
||||
golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
|
||||
golang.org/x/image v0.0.0-20220302094943-723b81ca9867/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
||||
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
@@ -2773,6 +2811,9 @@ golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
||||
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@@ -3206,6 +3247,8 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
|
||||
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
|
||||
golang.org/x/tools v0.1.11 h1:loJ25fNOEhSXfHrpoGj91eCUThwdNX6u24rO1xnNteY=
|
||||
golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
@@ -3227,6 +3270,7 @@ gonum.org/v1/netlib v0.0.0-20181029234149-ec6d1f5cefe6/go.mod h1:wa6Ws7BG/ESfp6d
|
||||
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
|
||||
gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc=
|
||||
gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY=
|
||||
gonum.org/v1/plot v0.10.1/go.mod h1:VZW5OlhkL1mysU9vaqNHnsy86inf6Ot+jB3r+BczCEo=
|
||||
google.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
|
||||
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
|
||||
google.golang.org/api v0.3.2/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
|
||||
@@ -3521,6 +3565,7 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las=
|
||||
honnef.co/go/tools v0.2.0/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY=
|
||||
howett.net/plist v0.0.0-20181124034731-591f970eefbb/go.mod h1:vMygbs4qMhSZSc4lCUl2OEE+rDiIIJAIdR4m7MiMcm0=
|
||||
inet.af/netaddr v0.0.0-20210707202901-70468d781e6c/go.mod h1:z0nx+Dh+7N7CC8V5ayHtHGpZpxLQZZxkIaaz6HN65Ls=
|
||||
|
||||
@@ -4,5 +4,5 @@
|
||||
"packages": [
|
||||
"packages/*"
|
||||
],
|
||||
"version": "8.5.20"
|
||||
"version": "8.5.27"
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"license": "AGPL-3.0-only",
|
||||
"private": true,
|
||||
"name": "grafana",
|
||||
"version": "8.5.20",
|
||||
"version": "8.5.27",
|
||||
"repository": "github:grafana/grafana",
|
||||
"scripts": {
|
||||
"api-tests": "jest --notify --watch --config=devenv/e2e-api-tests/jest.js",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/data",
|
||||
"version": "8.5.20",
|
||||
"version": "8.5.27",
|
||||
"description": "Grafana Data Library",
|
||||
"keywords": [
|
||||
"typescript"
|
||||
@@ -22,7 +22,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@braintree/sanitize-url": "6.0.0",
|
||||
"@grafana/schema": "8.5.20",
|
||||
"@grafana/schema": "8.5.27",
|
||||
"@types/d3-interpolate": "^1.4.0",
|
||||
"d3-interpolate": "1.4.0",
|
||||
"date-fns": "2.28.0",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/e2e-selectors",
|
||||
"version": "8.5.20",
|
||||
"version": "8.5.27",
|
||||
"description": "Grafana End-to-End Test Selectors Library",
|
||||
"keywords": [
|
||||
"cli",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/e2e",
|
||||
"version": "8.5.20",
|
||||
"version": "8.5.27",
|
||||
"description": "Grafana End-to-End Test Library",
|
||||
"keywords": [
|
||||
"cli",
|
||||
@@ -48,7 +48,7 @@
|
||||
"@babel/core": "7.17.8",
|
||||
"@babel/preset-env": "7.16.11",
|
||||
"@cypress/webpack-preprocessor": "5.11.1",
|
||||
"@grafana/e2e-selectors": "8.5.20",
|
||||
"@grafana/e2e-selectors": "8.5.27",
|
||||
"@grafana/tsconfig": "^1.2.0-rc1",
|
||||
"@mochajs/json-file-reporter": "^1.2.0",
|
||||
"babel-loader": "8.2.4",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/runtime",
|
||||
"version": "8.5.20",
|
||||
"version": "8.5.27",
|
||||
"description": "Grafana Runtime Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -22,9 +22,9 @@
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grafana/data": "8.5.20",
|
||||
"@grafana/e2e-selectors": "8.5.20",
|
||||
"@grafana/ui": "8.5.20",
|
||||
"@grafana/data": "8.5.27",
|
||||
"@grafana/e2e-selectors": "8.5.27",
|
||||
"@grafana/ui": "8.5.27",
|
||||
"@sentry/browser": "6.19.1",
|
||||
"history": "4.10.1",
|
||||
"lodash": "4.17.21",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/schema",
|
||||
"version": "8.5.20",
|
||||
"version": "8.5.27",
|
||||
"description": "Grafana Schema Library",
|
||||
"keywords": [
|
||||
"typescript"
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/toolkit",
|
||||
"version": "8.5.20",
|
||||
"version": "8.5.27",
|
||||
"description": "Grafana Toolkit",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -28,10 +28,10 @@
|
||||
"dependencies": {
|
||||
"@babel/core": "7.13.14",
|
||||
"@babel/preset-env": "7.13.12",
|
||||
"@grafana/data": "8.5.20",
|
||||
"@grafana/data": "8.5.27",
|
||||
"@grafana/eslint-config": "2.5.2",
|
||||
"@grafana/tsconfig": "^1.2.0-rc1",
|
||||
"@grafana/ui": "8.5.20",
|
||||
"@grafana/ui": "8.5.27",
|
||||
"@jest/core": "26.6.3",
|
||||
"@rushstack/eslint-patch": "1.0.6",
|
||||
"@types/command-exists": "^1.2.0",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/ui",
|
||||
"version": "8.5.20",
|
||||
"version": "8.5.27",
|
||||
"description": "Grafana Components Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -33,9 +33,9 @@
|
||||
"@emotion/css": "11.7.1",
|
||||
"@emotion/react": "11.8.2",
|
||||
"@grafana/aws-sdk": "0.0.35",
|
||||
"@grafana/data": "8.5.20",
|
||||
"@grafana/e2e-selectors": "8.5.20",
|
||||
"@grafana/schema": "8.5.20",
|
||||
"@grafana/data": "8.5.27",
|
||||
"@grafana/e2e-selectors": "8.5.27",
|
||||
"@grafana/schema": "8.5.27",
|
||||
"@monaco-editor/react": "4.3.1",
|
||||
"@popperjs/core": "2.11.4",
|
||||
"@react-aria/button": "3.4.3",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@jaegertracing/jaeger-ui-components",
|
||||
"version": "8.5.20",
|
||||
"version": "8.5.27",
|
||||
"main": "src/index.ts",
|
||||
"types": "src/index.ts",
|
||||
"license": "Apache-2.0",
|
||||
@@ -26,8 +26,8 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/css": "11.7.1",
|
||||
"@grafana/data": "8.5.20",
|
||||
"@grafana/ui": "8.5.20",
|
||||
"@grafana/data": "8.5.27",
|
||||
"@grafana/ui": "8.5.27",
|
||||
"chance": "^1.0.10",
|
||||
"classnames": "^2.2.5",
|
||||
"combokeys": "^3.0.0",
|
||||
|
||||
@@ -102,4 +102,22 @@ describe('<KeyValuesTable>', () => {
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('properly escapes values', () => {
|
||||
const data = [
|
||||
{
|
||||
key: 'jsonkey',
|
||||
value: JSON.stringify({
|
||||
'<img src=x onerror=alert(1)>': '<img src=x onerror=alert(1)>',
|
||||
url: 'https://example.com"id=x tabindex=1 onfocus=alert(1)',
|
||||
}),
|
||||
},
|
||||
];
|
||||
const wrapper = shallow(<KeyValuesTable data={data} />);
|
||||
const el = wrapper.find(`.${ubInlineBlock}`);
|
||||
expect(el.length).toBe(1);
|
||||
expect(el.html().replace(/\n/g, '')).toMatch(
|
||||
`<div class=\"css-7kp13n\"><div class=\"json-markup\">{ <span class=\"json-markup-key\">\"<img src=x onerror=alert(1)>\":</span> <span class=\"json-markup-string\">\"<img src=x onerror=alert(1)>\"</span>, <span class=\"json-markup-key\">\"url\":</span> <span class=\"json-markup-string\">\"<a href=\"https://example.com%22id=x%20tabindex=1%20onfocus=alert(1)\">https://example.com"id=x tabindex=1 onfocus=alert(1)</a>\"</span>}</div></div>`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -14,7 +14,6 @@
|
||||
|
||||
import { css } from '@emotion/css';
|
||||
import cx from 'classnames';
|
||||
import jsonMarkup from 'json-markup';
|
||||
import * as React from 'react';
|
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
@@ -26,6 +25,8 @@ import { TNil } from '../../types';
|
||||
import { TraceKeyValuePair, TraceLink } from '../../types/trace';
|
||||
import { ubInlineBlock, uWidth100 } from '../../uberUtilityStyles';
|
||||
|
||||
import jsonMarkup from './jsonMarkup';
|
||||
|
||||
const copyIconClassName = 'copyIcon';
|
||||
|
||||
export const getStyles = (theme: GrafanaTheme2) => {
|
||||
|
||||
@@ -0,0 +1,133 @@
|
||||
// The MIT License (MIT)
|
||||
//
|
||||
// Copyright (c) 2014 Mathias Buus
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in
|
||||
// all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
// THE SOFTWARE.
|
||||
|
||||
const INDENT = ' ';
|
||||
|
||||
function inlineRule(objRule) {
|
||||
let str = '';
|
||||
objRule &&
|
||||
Object.keys(objRule).forEach(function (rule) {
|
||||
str += rule + ':' + objRule[rule] + ';';
|
||||
});
|
||||
return str;
|
||||
}
|
||||
|
||||
function Stylize(styleFile) {
|
||||
function styleClass(cssClass) {
|
||||
return 'class="' + cssClass + '"';
|
||||
}
|
||||
|
||||
function styleInline(cssClass) {
|
||||
return 'style="' + inlineRule(styleFile['.' + cssClass]) + '"';
|
||||
}
|
||||
|
||||
if (!styleFile) {
|
||||
return styleClass;
|
||||
}
|
||||
return styleInline;
|
||||
}
|
||||
|
||||
function type(doc) {
|
||||
if (doc === null) {
|
||||
return 'null';
|
||||
}
|
||||
if (Array.isArray(doc)) {
|
||||
return 'array';
|
||||
}
|
||||
if (typeof doc === 'string' && /^https?:/.test(doc)) {
|
||||
return 'link';
|
||||
}
|
||||
if (typeof doc === 'object' && typeof doc.toISOString === 'function') {
|
||||
return 'date';
|
||||
}
|
||||
|
||||
return typeof doc;
|
||||
}
|
||||
|
||||
function escape(str) {
|
||||
return str.replace(/&/g, '&').replace(/</g, '<').replace(/>/g, '>').replace(/"/g, '"');
|
||||
}
|
||||
|
||||
module.exports = function (doc, styleFile) {
|
||||
let indent = '';
|
||||
const style = Stylize(styleFile);
|
||||
|
||||
let forEach = function (list, start, end, fn) {
|
||||
if (!list.length) {
|
||||
return start + ' ' + end;
|
||||
}
|
||||
|
||||
let out = start + '\n';
|
||||
|
||||
indent += INDENT;
|
||||
list.forEach(function (key, i) {
|
||||
out += indent + fn(key) + (i < list.length - 1 ? ',' : '') + '\n';
|
||||
});
|
||||
indent = indent.slice(0, -INDENT.length);
|
||||
|
||||
return out + indent + end;
|
||||
};
|
||||
|
||||
function visit(obj) {
|
||||
if (obj === undefined) {
|
||||
return '';
|
||||
}
|
||||
|
||||
switch (type(obj)) {
|
||||
case 'boolean':
|
||||
return '<span ' + style('json-markup-bool') + '>' + obj + '</span>';
|
||||
|
||||
case 'number':
|
||||
return '<span ' + style('json-markup-number') + '>' + obj + '</span>';
|
||||
|
||||
case 'date':
|
||||
return '<span class="json-markup-string">"' + escape(obj.toISOString()) + '"</span>';
|
||||
|
||||
case 'null':
|
||||
return '<span ' + style('json-markup-null') + '>null</span>';
|
||||
|
||||
case 'string':
|
||||
return '<span ' + style('json-markup-string') + '>"' + escape(obj.replace(/\n/g, '\n' + indent)) + '"</span>';
|
||||
|
||||
case 'link':
|
||||
return (
|
||||
'<span ' + style('json-markup-string') + '>"<a href="' + encodeURI(obj) + '">' + escape(obj) + '</a>"</span>'
|
||||
);
|
||||
|
||||
case 'array':
|
||||
return forEach(obj, '[', ']', visit);
|
||||
|
||||
case 'object':
|
||||
const keys = Object.keys(obj).filter(function (key) {
|
||||
return obj[key] !== undefined;
|
||||
});
|
||||
|
||||
return forEach(keys, '{', '}', function (key) {
|
||||
return '<span ' + style('json-markup-key') + '>"' + escape(key) + '":</span> ' + visit(obj[key]);
|
||||
});
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
return '<div ' + style('json-markup') + '>' + visit(doc) + '</div>';
|
||||
};
|
||||
@@ -307,16 +307,17 @@ func (hs *HTTPServer) SyncUser(
|
||||
connect social.SocialConnector,
|
||||
) (*models.User, error) {
|
||||
oauthLogger.Debug("Syncing Grafana user with corresponding OAuth profile")
|
||||
lookupParams := models.UserLookupParams{}
|
||||
if hs.Cfg.OAuthAllowInsecureEmailLookup {
|
||||
lookupParams.Email = &extUser.Email
|
||||
}
|
||||
|
||||
// add/update user in Grafana
|
||||
cmd := &models.UpsertUserCommand{
|
||||
ReqContext: ctx,
|
||||
ExternalUser: extUser,
|
||||
SignupAllowed: connect.IsSignupAllowed(),
|
||||
UserLookupParams: models.UserLookupParams{
|
||||
Email: &extUser.Email,
|
||||
UserID: nil,
|
||||
Login: nil,
|
||||
},
|
||||
ReqContext: ctx,
|
||||
ExternalUser: extUser,
|
||||
SignupAllowed: connect.IsSignupAllowed(),
|
||||
UserLookupParams: lookupParams,
|
||||
}
|
||||
|
||||
if err := hs.Login.UpsertUser(ctx.Req.Context(), cmd); err != nil {
|
||||
|
||||
31
pkg/build/cmd/argcount_wrapper.go
Normal file
31
pkg/build/cmd/argcount_wrapper.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package main
|
||||
|
||||
import "github.com/urfave/cli/v2"
|
||||
|
||||
// ArgCountWrapper will cause the action to fail if there were not exactly `num` args provided.
|
||||
func ArgCountWrapper(num int, action cli.ActionFunc) cli.ActionFunc {
|
||||
return func(ctx *cli.Context) error {
|
||||
if ctx.NArg() != num {
|
||||
if err := cli.ShowSubcommandHelp(ctx); err != nil {
|
||||
return cli.Exit(err.Error(), 1)
|
||||
}
|
||||
return cli.Exit("", 1)
|
||||
}
|
||||
|
||||
return action(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
// ArgCountWrapper will cause the action to fail if there were more than `num` args provided.
|
||||
func MaxArgCountWrapper(max int, action cli.ActionFunc) cli.ActionFunc {
|
||||
return func(ctx *cli.Context) error {
|
||||
if ctx.NArg() > max {
|
||||
if err := cli.ShowSubcommandHelp(ctx); err != nil {
|
||||
return cli.Exit(err.Error(), 1)
|
||||
}
|
||||
return cli.Exit("", 1)
|
||||
}
|
||||
|
||||
return action(ctx)
|
||||
}
|
||||
}
|
||||
59
pkg/build/cmd/flags.go
Normal file
59
pkg/build/cmd/flags.go
Normal file
@@ -0,0 +1,59 @@
|
||||
package main
|
||||
|
||||
import "github.com/urfave/cli/v2"
|
||||
|
||||
var (
|
||||
jobsFlag = cli.IntFlag{
|
||||
Name: "jobs",
|
||||
Usage: "Number of parallel jobs",
|
||||
}
|
||||
buildIDFlag = cli.StringFlag{
|
||||
Name: "build-id",
|
||||
Usage: "Optionally supply a build ID to be part of the version",
|
||||
}
|
||||
editionFlag = cli.StringFlag{
|
||||
Name: "edition",
|
||||
Usage: "The edition of Grafana to build (oss or enterprise)",
|
||||
Value: "oss",
|
||||
}
|
||||
variantsFlag = cli.StringFlag{
|
||||
Name: "variants",
|
||||
Usage: "Comma-separated list of variants to build",
|
||||
}
|
||||
triesFlag = cli.IntFlag{
|
||||
Name: "tries",
|
||||
Usage: "Specify number of tries before failing",
|
||||
Value: 1,
|
||||
}
|
||||
noInstallDepsFlag = cli.BoolFlag{
|
||||
Name: "no-install-deps",
|
||||
Usage: "Don't install dependencies",
|
||||
}
|
||||
signingAdminFlag = cli.BoolFlag{
|
||||
Name: "signing-admin",
|
||||
Usage: "Use manifest signing admin API endpoint?",
|
||||
}
|
||||
signFlag = cli.BoolFlag{
|
||||
Name: "sign",
|
||||
Usage: "Enable plug-in signing (you must set GRAFANA_API_KEY)",
|
||||
}
|
||||
dryRunFlag = cli.BoolFlag{
|
||||
Name: "dry-run",
|
||||
Usage: "Only simulate actions",
|
||||
}
|
||||
gcpKeyFlag = cli.StringFlag{
|
||||
Name: "gcp-key",
|
||||
Usage: "Google Cloud Platform key file",
|
||||
Required: true,
|
||||
}
|
||||
gitHubTokenFlag = cli.StringFlag{
|
||||
Name: "github-token",
|
||||
Value: "",
|
||||
EnvVars: []string{"GITHUB_TOKEN"},
|
||||
Usage: "GitHub token",
|
||||
}
|
||||
tagFlag = cli.StringFlag{
|
||||
Name: "tag",
|
||||
Usage: "Grafana version tag",
|
||||
}
|
||||
)
|
||||
323
pkg/build/cmd/grafanacom.go
Normal file
323
pkg/build/cmd/grafanacom.go
Normal file
@@ -0,0 +1,323 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/gcloud"
|
||||
"github.com/grafana/grafana/pkg/build/gcloud/storage"
|
||||
"github.com/grafana/grafana/pkg/build/packaging"
|
||||
)
|
||||
|
||||
const grafanaAPI = "https://grafana.com/api"
|
||||
|
||||
// GrafanaCom implements the sub-command "grafana-com".
|
||||
func GrafanaCom(c *cli.Context) error {
|
||||
bucketStr := c.String("src-bucket")
|
||||
edition := config.Edition(c.String("edition"))
|
||||
|
||||
if err := gcloud.ActivateServiceAccount(); err != nil {
|
||||
return fmt.Errorf("couldn't activate service account, err: %w", err)
|
||||
}
|
||||
|
||||
metadata, err := config.GenerateMetadata(c)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
releaseMode, err := metadata.GetReleaseMode()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
version := metadata.GrafanaVersion
|
||||
if releaseMode.Mode == config.Cronjob {
|
||||
gcs, err := storage.New()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
bucket := gcs.Bucket(bucketStr)
|
||||
latestMainVersion, err := storage.GetLatestMainBuild(c.Context, bucket, filepath.Join(string(edition), "main"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
version = latestMainVersion
|
||||
}
|
||||
|
||||
dryRun := c.Bool("dry-run")
|
||||
simulateRelease := c.Bool("simulate-release")
|
||||
// Test release mode and dryRun imply simulateRelease
|
||||
if releaseMode.IsTest || dryRun {
|
||||
simulateRelease = true
|
||||
}
|
||||
|
||||
grafanaAPIKey := strings.TrimSpace(os.Getenv("GRAFANA_COM_API_KEY"))
|
||||
if grafanaAPIKey == "" {
|
||||
return cli.Exit("the environment variable GRAFANA_COM_API_KEY must be set", 1)
|
||||
}
|
||||
whatsNewURL, releaseNotesURL, err := getReleaseURLs()
|
||||
if err != nil {
|
||||
return cli.Exit(err.Error(), 1)
|
||||
}
|
||||
|
||||
// TODO: Verify config values
|
||||
cfg := packaging.PublishConfig{
|
||||
Config: config.Config{
|
||||
Version: version,
|
||||
},
|
||||
Edition: edition,
|
||||
ReleaseMode: releaseMode,
|
||||
GrafanaAPIKey: grafanaAPIKey,
|
||||
WhatsNewURL: whatsNewURL,
|
||||
ReleaseNotesURL: releaseNotesURL,
|
||||
DryRun: dryRun,
|
||||
TTL: c.String("ttl"),
|
||||
SimulateRelease: simulateRelease,
|
||||
}
|
||||
|
||||
if err := publishPackages(cfg); err != nil {
|
||||
return cli.Exit(err.Error(), 1)
|
||||
}
|
||||
|
||||
log.Println("Successfully published packages to grafana.com!")
|
||||
return nil
|
||||
}
|
||||
|
||||
func getReleaseURLs() (string, string, error) {
|
||||
type grafanaConf struct {
|
||||
WhatsNewURL string `json:"whatsNewUrl"`
|
||||
ReleaseNotesURL string `json:"releaseNotesUrl"`
|
||||
}
|
||||
type packageConf struct {
|
||||
Grafana grafanaConf `json:"grafana"`
|
||||
}
|
||||
|
||||
pkgB, err := os.ReadFile("package.json")
|
||||
if err != nil {
|
||||
return "", "", fmt.Errorf("failed to read package.json: %w", err)
|
||||
}
|
||||
|
||||
var pconf packageConf
|
||||
if err := json.Unmarshal(pkgB, &pconf); err != nil {
|
||||
return "", "", fmt.Errorf("failed to decode package.json: %w", err)
|
||||
}
|
||||
if _, err := url.ParseRequestURI(pconf.Grafana.WhatsNewURL); err != nil {
|
||||
return "", "", fmt.Errorf("grafana.whatsNewUrl is invalid in package.json: %q", pconf.Grafana.WhatsNewURL)
|
||||
}
|
||||
if _, err := url.ParseRequestURI(pconf.Grafana.ReleaseNotesURL); err != nil {
|
||||
return "", "", fmt.Errorf("grafana.releaseNotesUrl is invalid in package.json: %q",
|
||||
pconf.Grafana.ReleaseNotesURL)
|
||||
}
|
||||
|
||||
return pconf.Grafana.WhatsNewURL, pconf.Grafana.ReleaseNotesURL, nil
|
||||
}
|
||||
|
||||
// publishPackages publishes packages to grafana.com.
|
||||
func publishPackages(cfg packaging.PublishConfig) error {
|
||||
log.Printf("Publishing Grafana packages, version %s, %s edition, %s mode, dryRun: %v, simulating: %v...\n",
|
||||
cfg.Version, cfg.Edition, cfg.ReleaseMode.Mode, cfg.DryRun, cfg.SimulateRelease)
|
||||
|
||||
versionStr := fmt.Sprintf("v%s", cfg.Version)
|
||||
log.Printf("Creating release %s at grafana.com...\n", versionStr)
|
||||
|
||||
var sfx string
|
||||
var pth string
|
||||
switch cfg.Edition {
|
||||
case config.EditionOSS:
|
||||
pth = "oss"
|
||||
case config.EditionEnterprise:
|
||||
pth = "enterprise"
|
||||
sfx = packaging.EnterpriseSfx
|
||||
default:
|
||||
return fmt.Errorf("unrecognized edition %q", cfg.Edition)
|
||||
}
|
||||
|
||||
switch cfg.ReleaseMode.Mode {
|
||||
case config.MainMode, config.DownstreamMode, config.CronjobMode:
|
||||
pth = path.Join(pth, packaging.MainFolder)
|
||||
default:
|
||||
pth = path.Join(pth, packaging.ReleaseFolder)
|
||||
}
|
||||
|
||||
product := fmt.Sprintf("grafana%s", sfx)
|
||||
pth = path.Join(pth, product)
|
||||
baseArchiveURL := fmt.Sprintf("https://dl.grafana.com/%s", pth)
|
||||
|
||||
var builds []buildRepr
|
||||
for _, ba := range packaging.ArtifactConfigs {
|
||||
u := ba.GetURL(baseArchiveURL, cfg)
|
||||
|
||||
sha256, err := getSHA256(u)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
builds = append(builds, buildRepr{
|
||||
OS: ba.Os,
|
||||
URL: u,
|
||||
SHA256: string(sha256),
|
||||
Arch: ba.Arch,
|
||||
})
|
||||
}
|
||||
|
||||
r := releaseRepr{
|
||||
Version: cfg.Version,
|
||||
ReleaseDate: time.Now().UTC(),
|
||||
Builds: builds,
|
||||
Stable: cfg.ReleaseMode.Mode == config.TagMode && !cfg.ReleaseMode.IsBeta && !cfg.ReleaseMode.IsTest,
|
||||
Beta: cfg.ReleaseMode.IsBeta,
|
||||
Nightly: cfg.ReleaseMode.Mode == config.CronjobMode,
|
||||
}
|
||||
if cfg.ReleaseMode.Mode == config.TagMode || r.Beta {
|
||||
r.WhatsNewURL = cfg.WhatsNewURL
|
||||
r.ReleaseNotesURL = cfg.ReleaseNotesURL
|
||||
}
|
||||
|
||||
if err := postRequest(cfg, "versions", r, fmt.Sprintf("create release %s", r.Version)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := postRequest(cfg, fmt.Sprintf("versions/%s", cfg.Version), r,
|
||||
fmt.Sprintf("update release %s", cfg.Version)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, b := range r.Builds {
|
||||
if err := postRequest(cfg, fmt.Sprintf("versions/%s/packages", cfg.Version), b,
|
||||
fmt.Sprintf("create build %s %s", b.OS, b.Arch)); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := postRequest(cfg, fmt.Sprintf("versions/%s/packages/%s/%s", cfg.Version, b.Arch, b.OS), b,
|
||||
fmt.Sprintf("update build %s %s", b.OS, b.Arch)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func getSHA256(u string) ([]byte, error) {
|
||||
shaURL := fmt.Sprintf("%s.sha256", u)
|
||||
// nolint:gosec
|
||||
resp, err := http.Get(shaURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
if err := resp.Body.Close(); err != nil {
|
||||
log.Println("failed to close response body, err: %w", err)
|
||||
}
|
||||
}()
|
||||
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
|
||||
return nil, fmt.Errorf("failed downloading %s: %s", u, resp.Status)
|
||||
}
|
||||
|
||||
sha256, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return sha256, nil
|
||||
}
|
||||
|
||||
func postRequest(cfg packaging.PublishConfig, pth string, obj interface{}, descr string) error {
|
||||
var sfx string
|
||||
switch cfg.Edition {
|
||||
case config.EditionOSS:
|
||||
case config.EditionEnterprise:
|
||||
sfx = packaging.EnterpriseSfx
|
||||
default:
|
||||
return fmt.Errorf("unrecognized edition %q", cfg.Edition)
|
||||
}
|
||||
product := fmt.Sprintf("grafana%s", sfx)
|
||||
|
||||
jsonB, err := json.Marshal(obj)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to JSON encode release: %w", err)
|
||||
}
|
||||
|
||||
u, err := constructURL(product, pth)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req, err := http.NewRequest(http.MethodPost, u, bytes.NewReader(jsonB))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", cfg.GrafanaAPIKey))
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
|
||||
log.Printf("Posting to grafana.com API, %s - JSON: %s\n", u, string(jsonB))
|
||||
if cfg.SimulateRelease {
|
||||
log.Println("Only simulating request")
|
||||
return nil
|
||||
}
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed posting to %s (%s): %s", u, descr, err)
|
||||
}
|
||||
defer func() {
|
||||
if err := resp.Body.Close(); err != nil {
|
||||
log.Println("failed to close response body, err: %w", err)
|
||||
}
|
||||
}()
|
||||
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if strings.Contains(string(body), "already exists") || strings.Contains(string(body), "Nothing to update") {
|
||||
log.Printf("Already exists: %s\n", descr)
|
||||
return nil
|
||||
}
|
||||
|
||||
return fmt.Errorf("failed posting to %s (%s): %s", u, descr, resp.Status)
|
||||
}
|
||||
|
||||
log.Printf("Successfully posted to grafana.com API, %s\n", u)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func constructURL(product string, pth string) (string, error) {
|
||||
productPath := filepath.Clean(filepath.Join("/", product, pth))
|
||||
u, err := url.Parse(grafanaAPI)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
u.Path = path.Join(u.Path, productPath)
|
||||
return u.String(), err
|
||||
}
|
||||
|
||||
type buildRepr struct {
|
||||
OS string `json:"os"`
|
||||
URL string `json:"url"`
|
||||
SHA256 string `json:"sha256"`
|
||||
Arch string `json:"arch"`
|
||||
}
|
||||
|
||||
type releaseRepr struct {
|
||||
Version string `json:"version"`
|
||||
ReleaseDate time.Time `json:"releaseDate"`
|
||||
Stable bool `json:"stable"`
|
||||
Beta bool `json:"beta"`
|
||||
Nightly bool `json:"nightly"`
|
||||
WhatsNewURL string `json:"whatsNewUrl"`
|
||||
ReleaseNotesURL string `json:"releaseNotesUrl"`
|
||||
Builds []buildRepr `json:"-"`
|
||||
}
|
||||
35
pkg/build/cmd/grafanacom_test.go
Normal file
35
pkg/build/cmd/grafanacom_test.go
Normal file
@@ -0,0 +1,35 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func Test_constructURL(t *testing.T) {
|
||||
type args struct {
|
||||
product string
|
||||
pth string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want string
|
||||
wantErr bool
|
||||
}{
|
||||
{name: "cleans .. sequence", args: args{"..", ".."}, want: "https://grafana.com/api", wantErr: false},
|
||||
{name: "doesn't clean anything - non malicious url", args: args{"foo", "bar"}, want: "https://grafana.com/api/foo/bar", wantErr: false},
|
||||
{name: "doesn't clean anything - three dots", args: args{"...", "..."}, want: "https://grafana.com/api/.../...", wantErr: false},
|
||||
{name: "cleans .", args: args{"..", ".."}, want: "https://grafana.com/api", wantErr: false},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, err := constructURL(tt.args.product, tt.args.pth)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("constructURL() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
if got != tt.want {
|
||||
t.Errorf("constructURL() got = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
48
pkg/build/cmd/main.go
Normal file
48
pkg/build/cmd/main.go
Normal file
@@ -0,0 +1,48 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
var additionalCommands []*cli.Command = make([]*cli.Command, 0, 5)
|
||||
|
||||
//nolint:unused
|
||||
func registerAppCommand(c *cli.Command) {
|
||||
additionalCommands = append(additionalCommands, c)
|
||||
}
|
||||
|
||||
func main() {
|
||||
app := cli.NewApp()
|
||||
app.Commands = cli.Commands{
|
||||
{
|
||||
Name: "publish",
|
||||
Usage: "Publish packages to Grafana com and repositories",
|
||||
Subcommands: cli.Commands{
|
||||
{
|
||||
Name: "grafana-com",
|
||||
Usage: "Publish packages to grafana.com",
|
||||
Action: GrafanaCom,
|
||||
Flags: []cli.Flag{
|
||||
&editionFlag,
|
||||
&buildIDFlag,
|
||||
&dryRunFlag,
|
||||
&cli.StringFlag{
|
||||
Name: "src-bucket",
|
||||
Value: "grafana-downloads",
|
||||
Usage: "Google Cloud Storage bucket",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
app.Commands = append(app.Commands, additionalCommands...)
|
||||
|
||||
if err := app.Run(os.Args); err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
}
|
||||
50
pkg/build/compilers/install.go
Normal file
50
pkg/build/compilers/install.go
Normal file
@@ -0,0 +1,50 @@
|
||||
package compilers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
const (
|
||||
ArmV6 = "/opt/rpi-tools/arm-bcm2708/arm-linux-gnueabihf/bin/arm-linux-gnueabihf-gcc"
|
||||
Armv7 = "arm-linux-gnueabihf-gcc"
|
||||
Armv7Musl = "/tmp/arm-linux-musleabihf-cross/bin/arm-linux-musleabihf-gcc"
|
||||
Arm64 = "aarch64-linux-gnu-gcc"
|
||||
Arm64Musl = "/tmp/aarch64-linux-musl-cross/bin/aarch64-linux-musl-gcc"
|
||||
Osx64 = "/tmp/osxcross/target/bin/o64-clang"
|
||||
Win64 = "x86_64-w64-mingw32-gcc"
|
||||
LinuxX64 = "/tmp/x86_64-centos6-linux-gnu/bin/x86_64-centos6-linux-gnu-gcc"
|
||||
LinuxX64Musl = "/tmp/x86_64-linux-musl-cross/bin/x86_64-linux-musl-gcc"
|
||||
)
|
||||
|
||||
func Install() error {
|
||||
// From the os.TempDir documentation:
|
||||
// On Unix systems, it returns $TMPDIR if non-empty,
|
||||
// else /tmp. On Windows, it uses GetTempPath,
|
||||
// returning the first non-empty value from %TMP%, %TEMP%, %USERPROFILE%,
|
||||
// or the Windows directory. On Plan 9, it returns /tmp.
|
||||
tmp := os.TempDir()
|
||||
|
||||
var (
|
||||
centosArchive = "x86_64-centos6-linux-gnu.tar.xz"
|
||||
osxArchive = "osxcross.tar.xz"
|
||||
)
|
||||
|
||||
for _, fname := range []string{centosArchive, osxArchive} {
|
||||
path := filepath.Join(tmp, fname)
|
||||
if _, err := os.Stat(path); err != nil {
|
||||
return fmt.Errorf("stat error: %w", err)
|
||||
}
|
||||
// Ignore gosec G204 as this function is only used in the build process.
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("tar", "xfJ", fname)
|
||||
cmd.Dir = tmp
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("failed to unpack %q: %q, %w", fname, output, err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
18
pkg/build/config/config.go
Normal file
18
pkg/build/config/config.go
Normal file
@@ -0,0 +1,18 @@
|
||||
package config
|
||||
|
||||
type Config struct {
|
||||
Version string
|
||||
Bucket string
|
||||
DebRepoBucket string
|
||||
DebDBBucket string
|
||||
RPMRepoBucket string
|
||||
GPGPassPath string
|
||||
GPGPrivateKey string
|
||||
GPGPublicKey string
|
||||
NumWorkers int
|
||||
GitHubUser string
|
||||
GitHubToken string
|
||||
PullEnterprise bool
|
||||
PackageVersion string
|
||||
SignPackages bool
|
||||
}
|
||||
9
pkg/build/config/edition.go
Normal file
9
pkg/build/config/edition.go
Normal file
@@ -0,0 +1,9 @@
|
||||
package config
|
||||
|
||||
type Edition string
|
||||
|
||||
const (
|
||||
EditionOSS Edition = "oss"
|
||||
EditionEnterprise Edition = "enterprise"
|
||||
EditionEnterprise2 Edition = "enterprise2"
|
||||
)
|
||||
102
pkg/build/config/genmetadata.go
Normal file
102
pkg/build/config/genmetadata.go
Normal file
@@ -0,0 +1,102 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/droneutil"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
func GenerateMetadata(c *cli.Context) (Metadata, error) {
|
||||
var metadata Metadata
|
||||
version := ""
|
||||
|
||||
event, err := droneutil.GetDroneEventFromEnv()
|
||||
if err != nil {
|
||||
return Metadata{}, err
|
||||
}
|
||||
|
||||
tag, ok := os.LookupEnv("DRONE_TAG")
|
||||
if !ok {
|
||||
fmt.Println("DRONE_TAG envvar not present, %w", err)
|
||||
}
|
||||
|
||||
var releaseMode ReleaseMode
|
||||
switch event {
|
||||
case string(PullRequestMode):
|
||||
releaseMode = ReleaseMode{Mode: PullRequestMode}
|
||||
case Push:
|
||||
mode, err := CheckDroneTargetBranch()
|
||||
if err != nil {
|
||||
return Metadata{}, err
|
||||
}
|
||||
releaseMode = ReleaseMode{Mode: mode}
|
||||
case Custom:
|
||||
if edition, _ := os.LookupEnv("EDITION"); edition == string(EditionEnterprise2) {
|
||||
releaseMode = ReleaseMode{Mode: Enterprise2Mode}
|
||||
if tag != "" {
|
||||
version = strings.TrimPrefix(tag, "v")
|
||||
}
|
||||
break
|
||||
}
|
||||
mode, err := CheckDroneTargetBranch()
|
||||
if err != nil {
|
||||
return Metadata{}, err
|
||||
}
|
||||
// if there is a custom event targeting the main branch, that's an enterprise downstream build
|
||||
if mode == MainBranch {
|
||||
releaseMode = ReleaseMode{Mode: DownstreamMode}
|
||||
} else {
|
||||
releaseMode = ReleaseMode{Mode: mode}
|
||||
}
|
||||
case Tag, Promote:
|
||||
if tag == "" {
|
||||
return Metadata{}, fmt.Errorf("DRONE_TAG envvar not present for a tag/promotion event, %w", err)
|
||||
}
|
||||
version = strings.TrimPrefix(tag, "v")
|
||||
mode, err := CheckSemverSuffix()
|
||||
if err != nil {
|
||||
return Metadata{}, err
|
||||
}
|
||||
releaseMode = mode
|
||||
case Cronjob:
|
||||
releaseMode = ReleaseMode{Mode: CronjobMode}
|
||||
}
|
||||
|
||||
if version == "" {
|
||||
version, err = generateVersionFromBuildID()
|
||||
if err != nil {
|
||||
return Metadata{}, err
|
||||
}
|
||||
}
|
||||
|
||||
currentCommit, err := GetDroneCommit()
|
||||
if err != nil {
|
||||
return Metadata{}, err
|
||||
}
|
||||
metadata = Metadata{
|
||||
GrafanaVersion: version,
|
||||
ReleaseMode: releaseMode,
|
||||
GrabplVersion: c.App.Version,
|
||||
CurrentCommit: currentCommit,
|
||||
}
|
||||
|
||||
fmt.Printf("building Grafana version: %s, release mode: %+v", metadata.GrafanaVersion, metadata.ReleaseMode)
|
||||
|
||||
return metadata, nil
|
||||
}
|
||||
|
||||
func generateVersionFromBuildID() (string, error) {
|
||||
buildID, ok := os.LookupEnv("DRONE_BUILD_NUMBER")
|
||||
if !ok {
|
||||
return "", fmt.Errorf("unable to get DRONE_BUILD_NUMBER environmental variable")
|
||||
}
|
||||
var err error
|
||||
version, err := GetGrafanaVersion(buildID, ".")
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return version, nil
|
||||
}
|
||||
81
pkg/build/config/genmetadata_test.go
Normal file
81
pkg/build/config/genmetadata_test.go
Normal file
@@ -0,0 +1,81 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
const (
|
||||
DroneBuildEvent = "DRONE_BUILD_EVENT"
|
||||
DroneTargetBranch = "DRONE_TARGET_BRANCH"
|
||||
DroneTag = "DRONE_TAG"
|
||||
DroneSemverPrerelease = "DRONE_SEMVER_PRERELEASE"
|
||||
DroneBuildNumber = "DRONE_BUILD_NUMBER"
|
||||
)
|
||||
|
||||
const (
|
||||
hashedGrafanaVersion = "9.2.0-12345pre"
|
||||
versionedBranch = "v9.2.x"
|
||||
)
|
||||
|
||||
func TestGetMetadata(t *testing.T) {
|
||||
tcs := []struct {
|
||||
envMap map[string]string
|
||||
expVersion string
|
||||
mode ReleaseMode
|
||||
}{
|
||||
{map[string]string{DroneBuildEvent: PullRequest, DroneTargetBranch: "", DroneTag: "", DroneSemverPrerelease: "", DroneBuildNumber: "12345"}, hashedGrafanaVersion, ReleaseMode{Mode: PullRequestMode}},
|
||||
{map[string]string{DroneBuildEvent: Push, DroneTargetBranch: versionedBranch, DroneTag: "", DroneSemverPrerelease: "", DroneBuildNumber: "12345"}, hashedGrafanaVersion, ReleaseMode{Mode: ReleaseBranchMode}},
|
||||
{map[string]string{DroneBuildEvent: Push, DroneTargetBranch: MainBranch, DroneTag: "", DroneSemverPrerelease: "", DroneBuildNumber: "12345"}, hashedGrafanaVersion, ReleaseMode{Mode: MainMode}},
|
||||
{map[string]string{DroneBuildEvent: Custom, DroneTargetBranch: versionedBranch, DroneTag: "", DroneSemverPrerelease: "", DroneBuildNumber: "12345"}, hashedGrafanaVersion, ReleaseMode{Mode: ReleaseBranchMode}},
|
||||
{map[string]string{DroneBuildEvent: Custom, DroneTargetBranch: MainBranch, DroneTag: "", DroneSemverPrerelease: "", DroneBuildNumber: "12345"}, hashedGrafanaVersion, ReleaseMode{Mode: DownstreamMode}},
|
||||
{map[string]string{DroneBuildEvent: Custom, DroneTargetBranch: MainBranch, DroneTag: "", DroneSemverPrerelease: "", DroneBuildNumber: "12345", "EDITION": string(EditionEnterprise2)}, hashedGrafanaVersion, ReleaseMode{Mode: Enterprise2Mode}},
|
||||
{map[string]string{DroneBuildEvent: Tag, DroneTargetBranch: "", DroneTag: "v9.2.0", DroneSemverPrerelease: "", DroneBuildNumber: "12345"}, "9.2.0", ReleaseMode{Mode: TagMode, IsBeta: false, IsTest: false}},
|
||||
{map[string]string{DroneBuildEvent: Tag, DroneTargetBranch: "", DroneTag: "v9.2.0-beta", DroneSemverPrerelease: "beta", DroneBuildNumber: "12345"}, "9.2.0-beta", ReleaseMode{Mode: TagMode, IsBeta: true, IsTest: false}},
|
||||
{map[string]string{DroneBuildEvent: Tag, DroneTargetBranch: "", DroneTag: "v9.2.0-test", DroneSemverPrerelease: "test", DroneBuildNumber: "12345"}, "9.2.0-test", ReleaseMode{Mode: TagMode, IsBeta: false, IsTest: true}},
|
||||
{map[string]string{DroneBuildEvent: Promote, DroneTargetBranch: "", DroneTag: "v9.2.0", DroneSemverPrerelease: "", DroneBuildNumber: "12345"}, "9.2.0", ReleaseMode{Mode: TagMode, IsBeta: false, IsTest: false}},
|
||||
{map[string]string{DroneBuildEvent: Promote, DroneTargetBranch: "", DroneTag: "v9.2.0-beta", DroneSemverPrerelease: "beta", DroneBuildNumber: "12345"}, "9.2.0-beta", ReleaseMode{Mode: TagMode, IsBeta: true, IsTest: false}},
|
||||
{map[string]string{DroneBuildEvent: Promote, DroneTargetBranch: "", DroneTag: "v9.2.0-test", DroneSemverPrerelease: "test", DroneBuildNumber: "12345"}, "9.2.0-test", ReleaseMode{Mode: TagMode, IsBeta: false, IsTest: true}},
|
||||
}
|
||||
|
||||
ctx := cli.NewContext(cli.NewApp(), &flag.FlagSet{}, nil)
|
||||
for _, tc := range tcs {
|
||||
t.Run("Should return valid metadata, ", func(t *testing.T) {
|
||||
setUpEnv(t, tc.envMap)
|
||||
testMetadata(t, ctx, tc.expVersion, tc.mode)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func testMetadata(t *testing.T, ctx *cli.Context, version string, releaseMode ReleaseMode) {
|
||||
t.Helper()
|
||||
|
||||
metadata, err := GenerateMetadata(ctx)
|
||||
require.NoError(t, err)
|
||||
t.Run("with a valid version", func(t *testing.T) {
|
||||
expVersion := metadata.GrafanaVersion
|
||||
require.Equal(t, expVersion, version)
|
||||
})
|
||||
|
||||
t.Run("with a valid release mode from the built-in list", func(t *testing.T) {
|
||||
expMode := metadata.ReleaseMode
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, expMode, releaseMode)
|
||||
})
|
||||
}
|
||||
|
||||
func setUpEnv(t *testing.T, envMap map[string]string) {
|
||||
t.Helper()
|
||||
|
||||
os.Clearenv()
|
||||
err := os.Setenv("DRONE_COMMIT", "abcd12345")
|
||||
require.NoError(t, err)
|
||||
for k, v := range envMap {
|
||||
err := os.Setenv(k, v)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
}
|
||||
3
pkg/build/config/package.json
Normal file
3
pkg/build/config/package.json
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"version": "9.2.0-pre"
|
||||
}
|
||||
55
pkg/build/config/revision.go
Normal file
55
pkg/build/config/revision.go
Normal file
@@ -0,0 +1,55 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/executil"
|
||||
)
|
||||
|
||||
type Revision struct {
|
||||
Timestamp int64
|
||||
SHA256 string
|
||||
Branch string
|
||||
}
|
||||
|
||||
func GrafanaTimestamp(ctx context.Context, dir string) (int64, error) {
|
||||
out, err := executil.OutputAt(ctx, dir, "git", "show", "-s", "--format=%ct")
|
||||
if err != nil {
|
||||
return time.Now().Unix(), nil
|
||||
}
|
||||
|
||||
stamp, err := strconv.ParseInt(out, 10, 64)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("failed to parse output from git show: %q", out)
|
||||
}
|
||||
|
||||
return stamp, nil
|
||||
}
|
||||
|
||||
// GrafanaRevision uses git commands to get information about the checked out Grafana code located at 'grafanaDir'.
|
||||
// This could maybe be a more generic "Describe" function in the "git" package.
|
||||
func GrafanaRevision(ctx context.Context, grafanaDir string) (Revision, error) {
|
||||
stamp, err := GrafanaTimestamp(ctx, grafanaDir)
|
||||
if err != nil {
|
||||
return Revision{}, err
|
||||
}
|
||||
|
||||
sha, err := executil.OutputAt(ctx, grafanaDir, "git", "rev-parse", "--short", "HEAD")
|
||||
if err != nil {
|
||||
return Revision{}, err
|
||||
}
|
||||
|
||||
branch, err := executil.OutputAt(ctx, grafanaDir, "git", "rev-parse", "--abbrev-ref", "HEAD")
|
||||
if err != nil {
|
||||
return Revision{}, err
|
||||
}
|
||||
|
||||
return Revision{
|
||||
SHA256: sha,
|
||||
Branch: branch,
|
||||
Timestamp: stamp,
|
||||
}, nil
|
||||
}
|
||||
63
pkg/build/config/variant.go
Normal file
63
pkg/build/config/variant.go
Normal file
@@ -0,0 +1,63 @@
|
||||
package config
|
||||
|
||||
// Variant is the OS / Architecture combination that Grafana can be compiled for.
|
||||
type Variant string
|
||||
|
||||
const (
|
||||
VariantLinuxAmd64 Variant = "linux-amd64"
|
||||
VariantLinuxAmd64Musl Variant = "linux-amd64-musl"
|
||||
VariantArmV6 Variant = "linux-armv6"
|
||||
VariantArmV7 Variant = "linux-armv7"
|
||||
VariantArmV7Musl Variant = "linux-armv7-musl"
|
||||
VariantArm64 Variant = "linux-arm64"
|
||||
VariantArm64Musl Variant = "linux-arm64-musl"
|
||||
VariantDarwinAmd64 Variant = "darwin-amd64"
|
||||
VariantWindowsAmd64 Variant = "windows-amd64"
|
||||
)
|
||||
|
||||
var AllVariants = []Variant{
|
||||
VariantArmV6,
|
||||
VariantArmV7,
|
||||
VariantArmV7Musl,
|
||||
VariantArm64,
|
||||
VariantArm64Musl,
|
||||
VariantDarwinAmd64,
|
||||
VariantWindowsAmd64,
|
||||
VariantLinuxAmd64,
|
||||
VariantLinuxAmd64Musl,
|
||||
}
|
||||
|
||||
// Architecture is an allowed value in the GOARCH environment variable.
|
||||
type Architecture string
|
||||
|
||||
const (
|
||||
ArchAMD64 Architecture = "amd64"
|
||||
ArchARMv6 Architecture = "armv6"
|
||||
ArchARMv7 Architecture = "armv7"
|
||||
ArchARM64 Architecture = "arm64"
|
||||
ArchARMHF Architecture = "armhf"
|
||||
ArchARMHFP Architecture = "armhfp"
|
||||
ArchARM Architecture = "arm"
|
||||
)
|
||||
|
||||
type OS string
|
||||
|
||||
const (
|
||||
OSWindows OS = "windows"
|
||||
OSDarwin OS = "darwin"
|
||||
OSLinux OS = "linux"
|
||||
)
|
||||
|
||||
type LibC string
|
||||
|
||||
const (
|
||||
LibCMusl = "musl"
|
||||
)
|
||||
|
||||
// Distribution is the base os image where the Grafana image is built on.
|
||||
type Distribution string
|
||||
|
||||
const (
|
||||
Ubuntu Distribution = "ubuntu"
|
||||
Alpine Distribution = "alpine"
|
||||
)
|
||||
157
pkg/build/config/version.go
Normal file
157
pkg/build/config/version.go
Normal file
@@ -0,0 +1,157 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/git"
|
||||
)
|
||||
|
||||
// Metadata describes a single build: the Grafana version being built, how the
// release was triggered, the grabpl (build pipeline tool) version, and the git
// commit the build was produced from.
type Metadata struct {
	GrafanaVersion string      `json:"version,omitempty"`
	ReleaseMode    ReleaseMode `json:"releaseMode,omitempty"`
	GrabplVersion  string      `json:"grabplVersion,omitempty"`
	CurrentCommit  string      `json:"currentCommit,omitempty"`
}

// ReleaseMode captures the event that triggered the build (see VersionMode)
// plus whether the release is a beta or test pre-release.
type ReleaseMode struct {
	Mode   VersionMode `json:"mode,omitempty"`
	IsBeta bool        `json:"isBeta,omitempty"`
	IsTest bool        `json:"isTest,omitempty"`
}

// PluginSignature controls whether bundled plugins are signed during the
// build, and whether admin-level signing is performed.
type PluginSignature struct {
	Sign      bool `json:"sign,omitempty"`
	AdminSign bool `json:"adminSign,omitempty"`
}

// Docker configures the Docker-image portion of a build: whether images are
// saved, which base distributions and CPU architectures are built, and the
// pre-release bucket images are staged in (if any).
type Docker struct {
	ShouldSave       bool           `json:"shouldSave,omitempty"`
	Distribution     []Distribution `json:"distribution,omitempty"`
	Architectures    []Architecture `json:"archs,omitempty"`
	PrereleaseBucket string         `json:"prereleaseBucket,omitempty"`
}

// Buckets lists the GCS destinations that build artifacts, CDN assets and the
// Storybook are published to. The *Dir fields are sub-paths within a bucket.
type Buckets struct {
	Artifacts            string `json:"artifacts,omitempty"`
	ArtifactsEnterprise2 string `json:"artifactsEnterprise2,omitempty"`
	CDNAssets            string `json:"CDNAssets,omitempty"`
	CDNAssetsDir         string `json:"CDNAssetsDir,omitempty"`
	Storybook            string `json:"storybook,omitempty"`
	StorybookSrcDir      string `json:"storybookSrcDir,omitempty"`
}

// BuildConfig represents the struct that defines all of the different variables used to build Grafana
type BuildConfig struct {
	Variants        []Variant       `json:"variants,omitempty"`
	PluginSignature PluginSignature `json:"pluginSignature,omitempty"`
	Docker          Docker          `json:"docker,omitempty"`
	Buckets         Buckets         `json:"buckets,omitempty"`
}

// GetReleaseMode returns the release mode stored in the metadata.
// The error is always nil; the signature keeps parity with other accessors.
func (md *Metadata) GetReleaseMode() (ReleaseMode, error) {
	return md.ReleaseMode, nil
}
|
||||
|
||||
// VersionMap is a map of versions. Each key of the Versions map is an event that uses the the config as the value for that key.
|
||||
// For example, the 'pull_request' key will have data in it that might cause Grafana to be built differently in a pull request,
|
||||
// than the way it will be built in 'main'
|
||||
type VersionMap map[VersionMode]BuildConfig
|
||||
|
||||
// GetBuildConfig reads the embedded config.json and decodes it.
|
||||
func GetBuildConfig(mode VersionMode) (*BuildConfig, error) {
|
||||
if v, ok := Versions[mode]; ok {
|
||||
return &v, nil
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("mode '%s' not found in version list", mode)
|
||||
}
|
||||
|
||||
// GetGrafanaVersion gets the Grafana version from the package.json
|
||||
func GetGrafanaVersion(buildID, grafanaDir string) (string, error) {
|
||||
pkgJSONPath := filepath.Join(grafanaDir, "package.json")
|
||||
//nolint:gosec
|
||||
pkgJSONB, err := os.ReadFile(pkgJSONPath)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to read %q: %w", pkgJSONPath, err)
|
||||
}
|
||||
pkgObj := map[string]interface{}{}
|
||||
if err := json.Unmarshal(pkgJSONB, &pkgObj); err != nil {
|
||||
return "", fmt.Errorf("failed decoding %q: %w", pkgJSONPath, err)
|
||||
}
|
||||
|
||||
version := pkgObj["version"].(string)
|
||||
if version == "" {
|
||||
return "", fmt.Errorf("failed to read version from %q", pkgJSONPath)
|
||||
}
|
||||
if buildID != "" {
|
||||
buildID = shortenBuildID(buildID)
|
||||
verComponents := strings.Split(version, "-")
|
||||
version = verComponents[0]
|
||||
if len(verComponents) > 1 {
|
||||
buildID = fmt.Sprintf("%s%s", buildID, verComponents[1])
|
||||
}
|
||||
version = fmt.Sprintf("%s-%s", version, buildID)
|
||||
}
|
||||
|
||||
return version, nil
|
||||
}
|
||||
|
||||
func CheckDroneTargetBranch() (VersionMode, error) {
|
||||
rePRCheckBranch := git.PRCheckRegexp()
|
||||
reRlsBranch := regexp.MustCompile(`^v\d+\.\d+\.x$`)
|
||||
target := os.Getenv("DRONE_TARGET_BRANCH")
|
||||
if target == "" {
|
||||
return "", fmt.Errorf("failed to get DRONE_TARGET_BRANCH environmental variable")
|
||||
} else if target == string(MainMode) {
|
||||
return MainMode, nil
|
||||
}
|
||||
if reRlsBranch.MatchString(target) {
|
||||
return ReleaseBranchMode, nil
|
||||
}
|
||||
if rePRCheckBranch.MatchString(target) {
|
||||
return PullRequestMode, nil
|
||||
}
|
||||
fmt.Printf("unrecognized target branch: %s, defaulting to %s", target, PullRequestMode)
|
||||
return PullRequestMode, nil
|
||||
}
|
||||
|
||||
func CheckSemverSuffix() (ReleaseMode, error) {
|
||||
reBetaRls := regexp.MustCompile(`beta.*`)
|
||||
reTestRls := regexp.MustCompile(`test.*`)
|
||||
tagSuffix, ok := os.LookupEnv("DRONE_SEMVER_PRERELEASE")
|
||||
if !ok || tagSuffix == "" {
|
||||
fmt.Println("DRONE_SEMVER_PRERELEASE doesn't exist for a tag, this is a release event...")
|
||||
return ReleaseMode{Mode: TagMode}, nil
|
||||
}
|
||||
switch {
|
||||
case reBetaRls.MatchString(tagSuffix):
|
||||
return ReleaseMode{Mode: TagMode, IsBeta: true}, nil
|
||||
case reTestRls.MatchString(tagSuffix):
|
||||
return ReleaseMode{Mode: TagMode, IsTest: true}, nil
|
||||
default:
|
||||
fmt.Printf("DRONE_SEMVER_PRERELEASE is custom string, release event with %s suffix\n", tagSuffix)
|
||||
return ReleaseMode{Mode: TagMode}, nil
|
||||
}
|
||||
}
|
||||
|
||||
func GetDroneCommit() (string, error) {
|
||||
commit := strings.TrimSpace(os.Getenv("DRONE_COMMIT"))
|
||||
if commit == "" {
|
||||
return "", fmt.Errorf("the environment variable DRONE_COMMIT is missing")
|
||||
}
|
||||
return commit, nil
|
||||
}
|
||||
|
||||
// shortenBuildID strips all dashes from buildID and truncates the result to
// at most 8 characters.
func shortenBuildID(buildID string) string {
	id := strings.ReplaceAll(buildID, "-", "")
	if len(id) >= 9 {
		id = id[:8]
	}
	return id
}
|
||||
27
pkg/build/config/version_mode.go
Normal file
27
pkg/build/config/version_mode.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package config
|
||||
|
||||
// VersionMode defines the source event that created a release or published version
type VersionMode string

// Recognized build/publish modes, keyed off the triggering branch or event.
const (
	MainMode          VersionMode = "main"
	TagMode           VersionMode = "release"
	ReleaseBranchMode VersionMode = "branch"
	PullRequestMode   VersionMode = "pull_request"
	DownstreamMode    VersionMode = "downstream"
	Enterprise2Mode   VersionMode = "enterprise2"
	CronjobMode       VersionMode = "cron"
)

// Drone build-event names as they appear in DRONE_BUILD_EVENT.
const (
	Tag         = "tag"
	PullRequest = "pull_request"
	Push        = "push"
	Custom      = "custom"
	Promote     = "promote"
	Cronjob     = "cron"
)

const (
	// MainBranch is the name of the default git branch.
	MainBranch = "main"
)
|
||||
206
pkg/build/config/versions.go
Normal file
206
pkg/build/config/versions.go
Normal file
@@ -0,0 +1,206 @@
|
||||
package config
|
||||
|
||||
// PublicBucket is the GCS bucket public release artifacts are served from.
const PublicBucket = "grafana-downloads"

// Versions maps each build event (VersionMode) to the BuildConfig used for it.
// Notable differences between modes:
//   - PullRequestMode builds a reduced variant set, no signing, and publishes
//     to no buckets.
//   - MainMode signs plugins but does not save Docker images.
//   - Tag/Enterprise2 modes stage everything in the grafana-prerelease bucket.
//
// NOTE(review): CronjobMode has no entry here, so GetBuildConfig returns an
// error for it — confirm that is intentional.
var Versions = VersionMap{
	PullRequestMode: {
		Variants: []Variant{
			VariantLinuxAmd64,
			VariantLinuxAmd64Musl,
			VariantDarwinAmd64,
			VariantWindowsAmd64,
		},
		PluginSignature: PluginSignature{
			Sign:      false,
			AdminSign: false,
		},
		Docker: Docker{
			ShouldSave: false,
			Architectures: []Architecture{
				ArchAMD64,
			},
			Distribution: []Distribution{
				Alpine,
			},
		},
	},
	MainMode: {
		Variants: []Variant{
			VariantArmV6,
			VariantArmV7,
			VariantArmV7Musl,
			VariantArm64,
			VariantArm64Musl,
			VariantDarwinAmd64,
			VariantWindowsAmd64,
			VariantLinuxAmd64,
			VariantLinuxAmd64Musl,
		},
		PluginSignature: PluginSignature{
			Sign:      true,
			AdminSign: true,
		},
		Docker: Docker{
			ShouldSave: false,
			Architectures: []Architecture{
				ArchAMD64,
				ArchARM64,
				ArchARMv7, // GOARCH=ARM is used for both armv6 and armv7. They are differentiated by the GOARM variable.
			},
			Distribution: []Distribution{
				Alpine,
				Ubuntu,
			},
		},
		Buckets: Buckets{
			Artifacts:            "grafana-downloads",
			ArtifactsEnterprise2: "grafana-downloads-enterprise2",
			CDNAssets:            "grafana-static-assets",
			Storybook:            "grafana-storybook",
		},
	},
	DownstreamMode: {
		Variants: []Variant{
			VariantArmV6,
			VariantArmV7,
			VariantArmV7Musl,
			VariantArm64,
			VariantArm64Musl,
			VariantDarwinAmd64,
			VariantWindowsAmd64,
			VariantLinuxAmd64,
			VariantLinuxAmd64Musl,
		},
		PluginSignature: PluginSignature{
			Sign:      true,
			AdminSign: true,
		},
		Docker: Docker{
			ShouldSave: true,
			Architectures: []Architecture{
				ArchAMD64,
				ArchARM64,
				ArchARMv7, // GOARCH=ARM is used for both armv6 and armv7. They are differentiated by the GOARM variable.
			},
			Distribution: []Distribution{
				Alpine,
				Ubuntu,
			},
		},
		Buckets: Buckets{
			Artifacts:            "grafana-downloads",
			ArtifactsEnterprise2: "grafana-downloads-enterprise2",
			CDNAssets:            "grafana-static-assets",
		},
	},
	ReleaseBranchMode: {
		Variants: []Variant{
			VariantArmV6,
			VariantArmV7,
			VariantArmV7Musl,
			VariantArm64,
			VariantArm64Musl,
			VariantDarwinAmd64,
			VariantWindowsAmd64,
			VariantLinuxAmd64,
			VariantLinuxAmd64Musl,
		},
		PluginSignature: PluginSignature{
			Sign:      true,
			AdminSign: true,
		},
		Docker: Docker{
			ShouldSave: true,
			Architectures: []Architecture{
				ArchAMD64,
				ArchARM64,
				ArchARMv7,
			},
			Distribution: []Distribution{
				Alpine,
				Ubuntu,
			},
		},
		Buckets: Buckets{
			Artifacts:            "grafana-downloads",
			ArtifactsEnterprise2: "grafana-downloads-enterprise2",
			CDNAssets:            "grafana-static-assets",
		},
	},
	TagMode: {
		Variants: []Variant{
			VariantArmV6,
			VariantArmV7,
			VariantArmV7Musl,
			VariantArm64,
			VariantArm64Musl,
			VariantDarwinAmd64,
			VariantWindowsAmd64,
			VariantLinuxAmd64,
			VariantLinuxAmd64Musl,
		},
		PluginSignature: PluginSignature{
			Sign:      true,
			AdminSign: true,
		},
		Docker: Docker{
			ShouldSave: true,
			Architectures: []Architecture{
				ArchAMD64,
				ArchARM64,
				ArchARMv7,
			},
			Distribution: []Distribution{
				Alpine,
				Ubuntu,
			},
			PrereleaseBucket: "grafana-prerelease/artifacts/docker",
		},
		Buckets: Buckets{
			Artifacts:            "grafana-prerelease/artifacts/downloads",
			ArtifactsEnterprise2: "grafana-prerelease/artifacts/downloads-enterprise2",
			CDNAssets:            "grafana-prerelease",
			CDNAssetsDir:         "artifacts/static-assets",
			Storybook:            "grafana-prerelease",
			StorybookSrcDir:      "artifacts/storybook",
		},
	},
	// Enterprise2Mode intentionally mirrors TagMode (prerelease staging).
	Enterprise2Mode: {
		Variants: []Variant{
			VariantArmV6,
			VariantArmV7,
			VariantArmV7Musl,
			VariantArm64,
			VariantArm64Musl,
			VariantDarwinAmd64,
			VariantWindowsAmd64,
			VariantLinuxAmd64,
			VariantLinuxAmd64Musl,
		},
		PluginSignature: PluginSignature{
			Sign:      true,
			AdminSign: true,
		},
		Docker: Docker{
			ShouldSave: true,
			Architectures: []Architecture{
				ArchAMD64,
				ArchARM64,
				ArchARMv7,
			},
			Distribution: []Distribution{
				Alpine,
				Ubuntu,
			},
			PrereleaseBucket: "grafana-prerelease/artifacts/docker",
		},
		Buckets: Buckets{
			Artifacts:            "grafana-prerelease/artifacts/downloads",
			ArtifactsEnterprise2: "grafana-prerelease/artifacts/downloads-enterprise2",
			CDNAssets:            "grafana-prerelease",
			CDNAssetsDir:         "artifacts/static-assets",
			Storybook:            "grafana-prerelease",
			StorybookSrcDir:      "artifacts/storybook",
		},
	},
}
|
||||
35
pkg/build/cryptoutil/md5.go
Normal file
35
pkg/build/cryptoutil/md5.go
Normal file
@@ -0,0 +1,35 @@
|
||||
package cryptoutil
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
)
|
||||
|
||||
func MD5File(fpath string) error {
|
||||
// Ignore gosec G304 as this function is only used in the build process.
|
||||
//nolint:gosec
|
||||
fd, err := os.Open(fpath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := fd.Close(); err != nil {
|
||||
log.Printf("error closing file at '%s': %s", fpath, err.Error())
|
||||
}
|
||||
}()
|
||||
|
||||
h := md5.New() // nolint:gosec
|
||||
if _, err = io.Copy(h, fd); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// nolint:gosec
|
||||
if err := os.WriteFile(fpath+".md5", []byte(fmt.Sprintf("%x\n", h.Sum(nil))), 0664); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
2
pkg/build/droneutil/docs.go
Normal file
2
pkg/build/droneutil/docs.go
Normal file
@@ -0,0 +1,2 @@
|
||||
// Package droneutil provides utility functions for working with Drone.
|
||||
package droneutil
|
||||
34
pkg/build/droneutil/event.go
Normal file
34
pkg/build/droneutil/event.go
Normal file
@@ -0,0 +1,34 @@
|
||||
package droneutil
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Lookup is the equivalent of os.LookupEnv, but also accepts a list of strings rather than only checking os.Environ()
func Lookup(values []string, val string) (string, bool) {
	// Hoist the "KEY=" prefix out of the loop; behavior is unchanged.
	prefix := val + "="
	for _, entry := range values {
		if strings.HasPrefix(entry, prefix) {
			return strings.TrimPrefix(entry, prefix), true
		}
	}
	return "", false
}

// GetDroneEvent looks for the "DRONE_BUILD_EVENT" in the provided env list and returns the value.
// if it was not found, then an error is returned.
func GetDroneEvent(env []string) (string, error) {
	if event, ok := Lookup(env, "DRONE_BUILD_EVENT"); ok {
		return event, nil
	}
	return "", fmt.Errorf("failed to get DRONE_BUILD_EVENT environmental variable")
}

// GetDroneEventFromEnv returns the value of DRONE_BUILD_EVENT from os.Environ()
func GetDroneEventFromEnv() (string, error) {
	return GetDroneEvent(os.Environ())
}
|
||||
36
pkg/build/droneutil/event_test.go
Normal file
36
pkg/build/droneutil/event_test.go
Normal file
@@ -0,0 +1,36 @@
|
||||
package droneutil_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/droneutil"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestGetDroneEvent checks that GetDroneEvent extracts DRONE_BUILD_EVENT from
// an env-style slice, and errors when the variable is absent.
func TestGetDroneEvent(t *testing.T) {
	t.Run("Should return the Drone Event", func(t *testing.T) {
		env := []string{"DRONE_BUILD_EVENT=pull_request"}
		droneEvent, err := droneutil.GetDroneEvent(env)
		require.NoError(t, err)
		require.Equal(t, droneEvent, "pull_request")
	})
	t.Run("Should return error, Drone Event env var is missing", func(t *testing.T) {
		droneEvent, err := droneutil.GetDroneEvent([]string{})
		require.Error(t, err)
		require.Empty(t, droneEvent)
	})
}

// TestLookup checks both the hit and miss paths of droneutil.Lookup.
// The env fixture includes an empty entry and a value-less "EXAMPLE_KEY"
// entry to make sure only the "KEY=value" form matches.
func TestLookup(t *testing.T) {
	env := []string{"", "EXAMPLE_KEY=value", "EXAMPLE_KEY"}
	t.Run("A valid lookup should return a string and no error", func(t *testing.T) {
		val, ok := droneutil.Lookup(env, "EXAMPLE_KEY")
		require.True(t, ok)
		require.Equal(t, val, "value")
	})

	t.Run("An invalid lookup should return an error", func(t *testing.T) {
		_, ok := droneutil.Lookup(env, "EXAMPLE_KEY_DOES_NOT_EXIST")
		require.False(t, ok)
	})
}
|
||||
61
pkg/build/errutil/group.go
Normal file
61
pkg/build/errutil/group.go
Normal file
@@ -0,0 +1,61 @@
|
||||
package errutil
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Group is a collection of goroutines working on subtasks of a common task,
// modelled after golang.org/x/sync/errgroup.Group with added logging and an
// explicit Cancel method. The first error reported by any wrapped function
// is recorded and cancels the group's context.
type Group struct {
	cancel func() // cancels the associated context; nil for a zero-value Group

	wg sync.WaitGroup // tracks goroutines started via Wrap/Go

	errOnce sync.Once // ensures only the FIRST error is recorded
	err     error     // the recorded error, returned by Wait
}

// GroupWithContext returns a new Group and a context derived from ctx that is
// canceled when any wrapped function errors, or when Wait/Cancel is called.
func GroupWithContext(ctx context.Context) (*Group, context.Context) {
	ctx, cancel := context.WithCancel(ctx)
	return &Group{cancel: cancel}, ctx
}

// Wait waits for any wrapped goroutines to finish and returns any error having occurred in one of them.
// It also cancels the group's context once all goroutines have finished.
func (g *Group) Wait() error {
	log.Println("Waiting on Group")
	g.wg.Wait()
	if g.cancel != nil {
		log.Println("Group canceling its context after waiting")
		g.cancel()
	}
	// Safe to read without errOnce here: wg.Wait() happens-after every
	// goroutine's write to g.err.
	return g.err
}

// Cancel cancels the associated context.
// NOTE(review): unlike Wait, this does not nil-check g.cancel — calling it on
// a Group not created via GroupWithContext would panic; confirm intended.
func (g *Group) Cancel() {
	log.Println("Group's Cancel method being called")
	g.cancel()
}

// Wrap wraps a function to be executed in a goroutine.
// The returned closure records the first error via errOnce and cancels the
// group's context when an error occurs.
func (g *Group) Wrap(f func() error) func() {
	g.wg.Add(1)
	return func() {
		defer g.wg.Done()

		if err := f(); err != nil {
			g.errOnce.Do(func() {
				log.Printf("An error occurred in Group: %s", err)
				g.err = err
				if g.cancel != nil {
					log.Println("Group canceling its context due to error")
					g.cancel()
				}
			})
		}
	}
}

// Go wraps the provided function and executes it in a goroutine.
func (g *Group) Go(f func() error) {
	wrapped := g.Wrap(f)
	go wrapped()
}
|
||||
46
pkg/build/executil/exec.go
Normal file
46
pkg/build/executil/exec.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package executil
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os/exec"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func RunAt(ctx context.Context, dir, cmd string, args ...string) error {
|
||||
// Ignore gosec G204 as this function is only used in the build process.
|
||||
//nolint:gosec
|
||||
c := exec.CommandContext(ctx, cmd, args...)
|
||||
c.Dir = dir
|
||||
|
||||
b, err := c.CombinedOutput()
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("%w. '%s %v': %s", err, cmd, args, string(b))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func Run(ctx context.Context, cmd string, args ...string) error {
|
||||
return RunAt(ctx, ".", cmd, args...)
|
||||
}
|
||||
|
||||
func OutputAt(ctx context.Context, dir, cmd string, args ...string) (string, error) {
|
||||
// Ignore gosec G204 as this function is only used in the build process.
|
||||
//nolint:gosec
|
||||
c := exec.CommandContext(ctx, cmd, args...)
|
||||
c.Dir = dir
|
||||
|
||||
b, err := c.CombinedOutput()
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return strings.TrimSpace(string(b)), nil
|
||||
}
|
||||
|
||||
func Output(ctx context.Context, cmd string, args ...string) (string, error) {
|
||||
return OutputAt(ctx, ".", cmd, args...)
|
||||
}
|
||||
87
pkg/build/fsutil/copy_test.go
Normal file
87
pkg/build/fsutil/copy_test.go
Normal file
@@ -0,0 +1,87 @@
|
||||
package fsutil_test
|
||||
|
||||
import (
|
||||
"os"
|
||||
"runtime"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/fsutil"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestCopyFile verifies the happy path: copying one temp file over another
// succeeds.
func TestCopyFile(t *testing.T) {
	src, err := os.CreateTemp("", "")
	require.NoError(t, err)
	defer func() {
		if err := os.RemoveAll(src.Name()); err != nil {
			t.Log(err)
		}
	}()

	err = os.WriteFile(src.Name(), []byte("Contents"), 0600)
	require.NoError(t, err)

	dst, err := os.CreateTemp("", "")
	require.NoError(t, err)
	defer func() {
		if err := os.RemoveAll(dst.Name()); err != nil {
			t.Log(err)
		}
	}()

	err = fsutil.CopyFile(src.Name(), dst.Name())
	require.NoError(t, err)
}

// TestCopyFile_Permissions verifies that the source file's permission bits
// are carried over to the destination. Windows only supports read-only vs
// read-write, so the expected mode differs there.
func TestCopyFile_Permissions(t *testing.T) {
	perms := os.FileMode(0700)
	if runtime.GOOS == "windows" {
		// Windows doesn't have file Unix style file permissions
		// It seems you have either 0444 for read-only or 0666 for read-write
		perms = os.FileMode(0666)
	}

	src, err := os.CreateTemp("", "")
	require.NoError(t, err)

	defer func() {
		if err := os.RemoveAll(src.Name()); err != nil {
			t.Log(err)
		}
	}()

	err = os.WriteFile(src.Name(), []byte("Contents"), perms)
	require.NoError(t, err)
	// Explicit chmod: WriteFile's perm argument is masked by umask on creation.
	err = os.Chmod(src.Name(), perms)
	require.NoError(t, err)

	dst, err := os.CreateTemp("", "")
	require.NoError(t, err)
	defer func() {
		if err := os.RemoveAll(dst.Name()); err != nil {
			t.Log(err)
		}
	}()

	err = fsutil.CopyFile(src.Name(), dst.Name())
	require.NoError(t, err)

	fi, err := os.Stat(dst.Name())
	require.NoError(t, err)
	assert.Equal(t, perms, fi.Mode()&os.ModePerm)
}

// Test case where destination directory doesn't exist.
func TestCopyFile_NonExistentDestDir(t *testing.T) {
	src, err := os.CreateTemp("", "")
	require.NoError(t, err)
	defer func() {
		if err := os.RemoveAll(src.Name()); err != nil {
			t.Log(err)
		}
	}()

	err = fsutil.CopyFile(src.Name(), "non-existent/dest")
	require.EqualError(t, err, "destination directory doesn't exist: \"non-existent\"")
}
|
||||
107
pkg/build/fsutil/copyfile.go
Normal file
107
pkg/build/fsutil/copyfile.go
Normal file
@@ -0,0 +1,107 @@
|
||||
package fsutil
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// CopyFile copies a file from src to dst.
//
// If src and dst files exist, and are the same, then return success. Otherwise, attempt to create a hard link
// between the two files. If that fails, copy the file contents from src to dst.
//
// NOTE(review): despite the comment above, no hard link is ever attempted in
// the code below — the fallback is always a full content copy. Confirm the
// comment is stale rather than the implementation incomplete.
func CopyFile(src, dst string) (err error) {
	// absSrc is used only to make error messages unambiguous.
	absSrc, err := filepath.Abs(src)
	if err != nil {
		return fmt.Errorf("failed to get absolute path of source file %q: %w", src, err)
	}
	sfi, err := os.Stat(src)
	if err != nil {
		err = fmt.Errorf("couldn't stat source file %q: %w", absSrc, err)
		return
	}
	if !sfi.Mode().IsRegular() {
		// Cannot copy non-regular files (e.g., directories, symlinks, devices, etc.)
		return fmt.Errorf("non-regular source file %s (%q)", absSrc, sfi.Mode().String())
	}
	// The destination's parent directory must already exist; this function
	// does not create it.
	dpath := filepath.Dir(dst)
	exists, err := Exists(dpath)
	if err != nil {
		return err
	}
	if !exists {
		err = fmt.Errorf("destination directory doesn't exist: %q", dpath)
		return
	}

	var dfi os.FileInfo
	dfi, err = os.Stat(dst)
	if err != nil {
		// A missing destination is fine (it will be created); any other
		// stat error aborts. Note: falls through with the named err still
		// set, which copyFileContents overwrites below.
		if !os.IsNotExist(err) {
			return
		}
	} else {
		if !(dfi.Mode().IsRegular()) {
			return fmt.Errorf("non-regular destination file %s (%q)", dfi.Name(), dfi.Mode().String())
		}
		// Same underlying file: nothing to copy, just sync permissions.
		if os.SameFile(sfi, dfi) {
			return copyPermissions(sfi.Name(), dfi.Name())
		}
	}

	err = copyFileContents(src, dst)
	return err
}
|
||||
|
||||
// copyFileContents copies the contents of the file named src to the file named
// by dst. The file will be created if it does not already exist. If the
// destination file exists, all it's contents will be replaced by the contents
// of the source file. The source file's permission bits are applied to dst.
func copyFileContents(src, dst string) (err error) {
	//nolint:gosec
	in, err := os.Open(src)
	if err != nil {
		return
	}
	defer func() {
		if err := in.Close(); err != nil {
			log.Println("error closing file", err)
		}
	}()

	//nolint:gosec
	out, err := os.Create(dst)
	if err != nil {
		return
	}
	// Deliberately captures Close's error into the named return, but only
	// when no earlier error occurred — do not reorder these defers.
	defer func() {
		if cerr := out.Close(); cerr != nil && err == nil {
			err = cerr
		}
	}()

	if _, err = io.Copy(out, in); err != nil {
		return
	}

	// Flush to stable storage before declaring success.
	if err := out.Sync(); err != nil {
		return err
	}

	return copyPermissions(src, dst)
}
|
||||
|
||||
func copyPermissions(src, dst string) error {
|
||||
sfi, err := os.Lstat(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := os.Chmod(dst, sfi.Mode()); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
43
pkg/build/fsutil/createtemp.go
Normal file
43
pkg/build/fsutil/createtemp.go
Normal file
@@ -0,0 +1,43 @@
|
||||
package fsutil
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
// CreateTempFile generates a temp filepath, based on the provided suffix.
|
||||
// A typical generated path looks like /var/folders/abcd/abcdefg/A/1137975807.
|
||||
func CreateTempFile(sfx string) (string, error) {
|
||||
var suffix string
|
||||
if sfx != "" {
|
||||
suffix = fmt.Sprintf("*-%s", sfx)
|
||||
} else {
|
||||
suffix = sfx
|
||||
}
|
||||
f, err := os.CreateTemp("", suffix)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if err := f.Close(); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return f.Name(), nil
|
||||
}
|
||||
|
||||
// CreateTempDir generates a temp directory, based on the provided suffix.
|
||||
// A typical generated path looks like /var/folders/abcd/abcdefg/A/1137975807/.
|
||||
func CreateTempDir(sfx string) (string, error) {
|
||||
var suffix string
|
||||
if sfx != "" {
|
||||
suffix = fmt.Sprintf("*-%s", sfx)
|
||||
} else {
|
||||
suffix = sfx
|
||||
}
|
||||
dir, err := os.MkdirTemp("", suffix)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return dir, nil
|
||||
}
|
||||
48
pkg/build/fsutil/createtemp_test.go
Normal file
48
pkg/build/fsutil/createtemp_test.go
Normal file
@@ -0,0 +1,48 @@
|
||||
package fsutil
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestCreateTempFile checks the suffix behavior of CreateTempFile: without a
// suffix the final path component contains no dash; with one it contains
// exactly one ("<random>-foobar").
// NOTE(review): the created temp files are never removed — consider t.Cleanup.
func TestCreateTempFile(t *testing.T) {
	t.Run("empty suffix, expects pattern like: /var/folders/abcd/abcdefg/A/1137975807", func(t *testing.T) {
		filePath, err := CreateTempFile("")
		require.NoError(t, err)

		pathParts := strings.Split(filePath, "/")
		require.Greater(t, len(pathParts), 1)
		require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 1)
	})

	t.Run("non-empty suffix, expects /var/folders/abcd/abcdefg/A/1137975807-foobar", func(t *testing.T) {
		filePath, err := CreateTempFile("foobar")
		require.NoError(t, err)

		pathParts := strings.Split(filePath, "/")
		require.Greater(t, len(pathParts), 1)
		require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 2)
	})
}
|
||||
|
||||
func TestCreateTempDir(t *testing.T) {
|
||||
t.Run("empty suffix, expects pattern like: /var/folders/abcd/abcdefg/A/1137975807/", func(t *testing.T) {
|
||||
filePath, err := CreateTempFile("")
|
||||
require.NoError(t, err)
|
||||
|
||||
pathParts := strings.Split(filePath, "/")
|
||||
require.Greater(t, len(pathParts), 1)
|
||||
require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 1)
|
||||
})
|
||||
|
||||
t.Run("non-empty suffix, expects /var/folders/abcd/abcdefg/A/1137975807-foobar/", func(t *testing.T) {
|
||||
filePath, err := CreateTempFile("foobar")
|
||||
require.NoError(t, err)
|
||||
|
||||
pathParts := strings.Split(filePath, "/")
|
||||
require.Greater(t, len(pathParts), 1)
|
||||
require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 2)
|
||||
})
|
||||
}
|
||||
15
pkg/build/fsutil/exists_test.go
Normal file
15
pkg/build/fsutil/exists_test.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package fsutil_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/fsutil"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestExists_NonExistent verifies that Exists reports false — with no error —
// for a path that does not exist (ENOENT must not surface as an error).
func TestExists_NonExistent(t *testing.T) {
	exists, err := fsutil.Exists("non-existent")
	require.NoError(t, err)

	require.False(t, exists)
}
|
||||
16
pkg/build/fsutil/exsits.go
Normal file
16
pkg/build/fsutil/exsits.go
Normal file
@@ -0,0 +1,16 @@
|
||||
package fsutil
|
||||
|
||||
import "os"
|
||||
|
||||
// Exists determines whether a file/directory exists or not.
|
||||
func Exists(fpath string) (bool, error) {
|
||||
_, err := os.Stat(fpath)
|
||||
if err != nil {
|
||||
if !os.IsNotExist(err) {
|
||||
return false, err
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
65
pkg/build/gcloud/auth.go
Normal file
65
pkg/build/gcloud/auth.go
Normal file
@@ -0,0 +1,65 @@
|
||||
package gcloud
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func GetDecodedKey() ([]byte, error) {
|
||||
gcpKey := strings.TrimSpace(os.Getenv("GCP_KEY"))
|
||||
if gcpKey == "" {
|
||||
return nil, fmt.Errorf("the environment variable GCP_KEY must be set")
|
||||
}
|
||||
|
||||
gcpKeyB, err := base64.StdEncoding.DecodeString(gcpKey)
|
||||
if err != nil {
|
||||
// key is not always base64 encoded
|
||||
validKey := []byte(gcpKey)
|
||||
if json.Valid(validKey) {
|
||||
return validKey, nil
|
||||
}
|
||||
return nil, fmt.Errorf("error decoding the gcp_key, err: %q", err)
|
||||
}
|
||||
|
||||
return gcpKeyB, nil
|
||||
}
|
||||
|
||||
// ActivateServiceAccount authenticates the local gcloud CLI using the service
// account key from the GCP_KEY environment variable. The key is written to a
// temporary JSON file, passed to `gcloud auth activate-service-account`, and
// the file is removed afterwards.
func ActivateServiceAccount() error {
	byteKey, err := GetDecodedKey()
	if err != nil {
		return err
	}

	f, err := os.CreateTemp("", "*.json")
	if err != nil {
		return err
	}
	// Deferred LIFO: the file is closed first, then removed, so the key
	// material does not outlive this function.
	defer func() {
		if err := os.Remove(f.Name()); err != nil {
			log.Printf("error removing %s: %s", f.Name(), err)
		}
	}()

	defer func() {
		if err := f.Close(); err != nil {
			log.Println("error closing file:", err)
		}
	}()

	if _, err := f.Write(byteKey); err != nil {
		return fmt.Errorf("failed to write GCP key file: %w", err)
	}
	keyArg := fmt.Sprintf("--key-file=%s", f.Name())
	//nolint:gosec
	cmd := exec.Command("gcloud", "auth", "activate-service-account", keyArg)

	// NOTE(review): the key file is not explicitly flushed before gcloud
	// reads it; os.File writes are unbuffered so this is fine, but worth
	// knowing if buffering is ever introduced.
	if output, err := cmd.CombinedOutput(); err != nil {
		return fmt.Errorf("failed to sign into GCP: %w\n%s", err, output)
	}
	return nil
}
|
||||
457
pkg/build/gcloud/storage/gsutil.go
Normal file
457
pkg/build/gcloud/storage/gsutil.go
Normal file
@@ -0,0 +1,457 @@
|
||||
package storage
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"mime"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"cloud.google.com/go/storage"
|
||||
"github.com/grafana/grafana/pkg/build/fsutil"
|
||||
"github.com/grafana/grafana/pkg/build/gcloud"
|
||||
"google.golang.org/api/iterator"
|
||||
"google.golang.org/api/option"
|
||||
)
|
||||
|
||||
var (
	// ErrorNilBucket is returned when a function is called where a bucket argument is expected and the bucket is nil.
	ErrorNilBucket = errors.New("a bucket must be provided")
)

const (
	// maxThreads specify the number of max threads that can run at the same time.
	// Set to 1000, since the maximum number of simultaneous open files for the runners is 1024.
	maxThreads = 1000
)

// Client wraps the gcloud storage Client with convenient helper functions.
// By using an embedded type we can still use the functions provided by storage.Client if we need to.
type Client struct {
	storage.Client
}

// File represents a file in Google Cloud Storage.
type File struct {
	// FullPath is the complete path of the file.
	FullPath string
	// PathTrimmed is the file path with some leading portion removed
	// (used when uploading without the full local directory prefix).
	PathTrimmed string
}
|
||||
|
||||
// New creates a new Client by checking for the Google Cloud SDK auth key and/or environment variable.
func New() (*Client, error) {
	client, err := newClient()
	if err != nil {
		return nil, err
	}

	return &Client{
		Client: *client,
	}, nil
}

// newClient initializes the google-cloud-storage (GCS) client.
// It first tries the GCP_KEY environment variable (via gcloud.GetDecodedKey),
// then falls back to application-default credentials.
// NOTE(review): the original comment said application-default credentials are
// checked first, but the code tries GCP_KEY first — comment corrected to
// match the code.
func newClient() (*storage.Client, error) {
	ctx := context.Background()

	byteKey, err := gcloud.GetDecodedKey()
	if err != nil {
		return nil, fmt.Errorf("failed to get gcp key, err: %w", err)
	}
	client, err := storage.NewClient(ctx, option.WithCredentialsJSON(byteKey))
	if err != nil {
		// Fallback: ambient credentials (gcloud auth application-default).
		log.Println("failed to login with GCP_KEY, trying with default application credentials...")
		client, err = storage.NewClient(ctx)
		if err != nil {
			return nil, fmt.Errorf("failed to open Google Cloud Storage client: %w", err)
		}
	}

	return client, nil
}
|
||||
|
||||
// CopyLocalDir copies a local directory 'dir' to the bucket 'bucket' at the path 'bucketPath'.
|
||||
func (client *Client) CopyLocalDir(ctx context.Context, dir string, bucket *storage.BucketHandle, bucketPath string, trim bool) error {
|
||||
if bucket == nil {
|
||||
return ErrorNilBucket
|
||||
}
|
||||
|
||||
files, err := ListLocalFiles(dir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Printf("Number or files to be copied over: %d\n", len(files))
|
||||
|
||||
for _, chunk := range asChunks(files, maxThreads) {
|
||||
var wg sync.WaitGroup
|
||||
for _, f := range chunk {
|
||||
wg.Add(1)
|
||||
go func(file File) {
|
||||
defer wg.Done()
|
||||
err = client.Copy(ctx, file, bucket, bucketPath, trim)
|
||||
if err != nil {
|
||||
log.Printf("failed to copy objects, err: %s\n", err.Error())
|
||||
}
|
||||
}(f)
|
||||
}
|
||||
wg.Wait()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Copy copies a single local file into the bucket at the provided path.
|
||||
// trim variable should be set to true if the full object path is needed - false otherwise.
|
||||
func (client *Client) Copy(ctx context.Context, file File, bucket *storage.BucketHandle, remote string, trim bool) error {
|
||||
if bucket == nil {
|
||||
return ErrorNilBucket
|
||||
}
|
||||
|
||||
localFile, err := os.Open(file.FullPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open file %s, err: %q", file.FullPath, err)
|
||||
}
|
||||
defer func() {
|
||||
if err := localFile.Close(); err != nil {
|
||||
log.Println("failed to close localfile", "err", err)
|
||||
}
|
||||
}()
|
||||
|
||||
extension := strings.ToLower(path.Ext(file.FullPath))
|
||||
contentType := mime.TypeByExtension(extension)
|
||||
|
||||
filePath := file.FullPath
|
||||
if trim {
|
||||
filePath = file.PathTrimmed
|
||||
}
|
||||
|
||||
objectPath := path.Join(remote, filePath)
|
||||
|
||||
wc := bucket.Object(objectPath).NewWriter(ctx)
|
||||
wc.ContentType = contentType
|
||||
defer func() {
|
||||
if err := wc.Close(); err != nil {
|
||||
log.Println("failed to close writer", "err", err)
|
||||
}
|
||||
}()
|
||||
|
||||
if _, err = io.Copy(wc, localFile); err != nil {
|
||||
return fmt.Errorf("failed to copy to Cloud Storage: %w", err)
|
||||
}
|
||||
|
||||
log.Printf("Successfully uploaded tarball to Google Cloud Storage, path: %s/%s\n", remote, file.FullPath)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// CopyRemoteDir copies an entire directory 'from' from the bucket 'fromBucket' into the 'toBucket' at the path 'to'.
|
||||
func (client *Client) CopyRemoteDir(ctx context.Context, fromBucket *storage.BucketHandle, from string, toBucket *storage.BucketHandle, to string) error {
|
||||
if toBucket == nil || fromBucket == nil {
|
||||
return ErrorNilBucket
|
||||
}
|
||||
|
||||
files, err := ListRemoteFiles(ctx, fromBucket, FilesFilter{Prefix: from})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var ch = make(chan File, len(files))
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(maxThreads)
|
||||
|
||||
for i := 0; i < maxThreads; i++ {
|
||||
go func() {
|
||||
for {
|
||||
file, ok := <-ch
|
||||
if !ok {
|
||||
wg.Done()
|
||||
return
|
||||
}
|
||||
if err := client.RemoteCopy(ctx, file, fromBucket, toBucket, to); err != nil {
|
||||
log.Printf("failed to copy files between buckets: err: %s\n", err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
ch <- file
|
||||
}
|
||||
|
||||
close(ch)
|
||||
wg.Wait()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// RemoteCopy will copy the file 'file' from the 'fromBucket' to the 'toBucket' at the path 'path'.
|
||||
func (client *Client) RemoteCopy(ctx context.Context, file File, fromBucket, toBucket *storage.BucketHandle, path string) error {
|
||||
// Should this be path.Join instead of filepath.Join? filepath.Join on Windows will produce `\\` separators instead of `/`.
|
||||
var (
|
||||
src = fromBucket.Object(file.FullPath)
|
||||
dstObject = filepath.Join(path, file.PathTrimmed)
|
||||
dst = toBucket.Object(dstObject)
|
||||
)
|
||||
|
||||
if _, err := dst.CopierFrom(src).Run(ctx); err != nil {
|
||||
return fmt.Errorf("failed to copy object %s, to %s, err: %w", file.FullPath, dstObject, err)
|
||||
}
|
||||
|
||||
log.Printf("%s was successfully copied to %v bucket!.\n\n", file.FullPath, toBucket)
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeleteDir deletes a directory at 'path' from the bucket.
|
||||
func (client *Client) DeleteDir(ctx context.Context, bucket *storage.BucketHandle, path string) error {
|
||||
if bucket == nil {
|
||||
return ErrorNilBucket
|
||||
}
|
||||
|
||||
files, err := ListRemoteFiles(ctx, bucket, FilesFilter{Prefix: path})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var ch = make(chan string, len(files))
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(maxThreads)
|
||||
|
||||
for i := 0; i < maxThreads; i++ {
|
||||
go func() {
|
||||
for {
|
||||
fullPath, ok := <-ch
|
||||
if !ok {
|
||||
wg.Done()
|
||||
return
|
||||
}
|
||||
err := client.Delete(ctx, bucket, fullPath)
|
||||
if err != nil && !errors.Is(err, storage.ErrObjectNotExist) {
|
||||
log.Printf("failed to delete objects, err %s\n", err.Error())
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
ch <- file.FullPath
|
||||
}
|
||||
|
||||
close(ch)
|
||||
wg.Wait()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Delete deletes single item from the bucket at 'path'.
|
||||
func (client *Client) Delete(ctx context.Context, bucket *storage.BucketHandle, path string) error {
|
||||
object := bucket.Object(path)
|
||||
if err := object.Delete(ctx); err != nil {
|
||||
return fmt.Errorf("cannot delete %s, err: %w", path, err)
|
||||
}
|
||||
log.Printf("Successfully deleted tarball to Google Cloud Storage, path: %s", path)
|
||||
return nil
|
||||
}
|
||||
|
||||
// ListLocalFiles lists files in a local filesystem.
|
||||
func ListLocalFiles(dir string) ([]File, error) {
|
||||
var files []File
|
||||
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
|
||||
if !info.IsDir() {
|
||||
files = append(files, File{
|
||||
FullPath: path,
|
||||
// Strip the dir name from the filepath
|
||||
PathTrimmed: strings.ReplaceAll(path, dir, ""),
|
||||
})
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error walking path: %v", err)
|
||||
}
|
||||
|
||||
return files, nil
|
||||
}
|
||||
|
||||
// FilesFilter restricts which remote objects ListRemoteFiles returns.
type FilesFilter struct {
	// Prefix filters objects by name prefix (GCS Query.Prefix).
	Prefix string
	// FileExts, when non-empty, keeps only objects whose extension matches
	// one of the listed extensions.
	FileExts []string
}
|
||||
|
||||
// ListRemoteFiles lists all the files in the directory (filtering by FilesFilter) and returns a File struct for each one.
|
||||
func ListRemoteFiles(ctx context.Context, bucket *storage.BucketHandle, filter FilesFilter) ([]File, error) {
|
||||
if bucket == nil {
|
||||
return []File{}, ErrorNilBucket
|
||||
}
|
||||
|
||||
it := bucket.Objects(ctx, &storage.Query{
|
||||
Prefix: filter.Prefix,
|
||||
})
|
||||
|
||||
var files []File
|
||||
for {
|
||||
attrs, err := it.Next()
|
||||
if err != nil {
|
||||
if errors.Is(err, iterator.Done) {
|
||||
break
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to iterate through bucket, err: %w", err)
|
||||
}
|
||||
|
||||
extMatch := len(filter.FileExts) == 0
|
||||
for _, ext := range filter.FileExts {
|
||||
if ext == filepath.Ext(attrs.Name) {
|
||||
extMatch = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if extMatch {
|
||||
files = append(files, File{FullPath: attrs.Name, PathTrimmed: strings.TrimPrefix(attrs.Name, filter.Prefix)})
|
||||
}
|
||||
}
|
||||
|
||||
return files, nil
|
||||
}
|
||||
|
||||
// DownloadDirectory downloads files from bucket (filtering by FilesFilter) to destPath on disk.
|
||||
func (client *Client) DownloadDirectory(ctx context.Context, bucket *storage.BucketHandle, destPath string, filter FilesFilter) error {
|
||||
if bucket == nil {
|
||||
return ErrorNilBucket
|
||||
}
|
||||
|
||||
files, err := ListRemoteFiles(ctx, bucket, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// return err if dir already exists
|
||||
exists, err := fsutil.Exists(destPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if exists {
|
||||
return fmt.Errorf("destination path %q already exists", destPath)
|
||||
}
|
||||
|
||||
err = os.MkdirAll(destPath, 0750)
|
||||
if err != nil && !os.IsExist(err) {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
err = client.downloadFile(ctx, bucket, file.FullPath, filepath.Join(destPath, file.PathTrimmed))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetLatestMainBuild gets the latest main build which is successfully uploaded to the gcs bucket.
|
||||
func GetLatestMainBuild(ctx context.Context, bucket *storage.BucketHandle, path string) (string, error) {
|
||||
if bucket == nil {
|
||||
return "", ErrorNilBucket
|
||||
}
|
||||
|
||||
it := bucket.Objects(ctx, &storage.Query{
|
||||
Prefix: path,
|
||||
})
|
||||
|
||||
var files []string
|
||||
for {
|
||||
attrs, err := it.Next()
|
||||
if errors.Is(err, iterator.Done) {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to iterate through bucket, err: %w", err)
|
||||
}
|
||||
|
||||
files = append(files, attrs.Name)
|
||||
}
|
||||
|
||||
var latestVersion string
|
||||
for i := len(files) - 1; i >= 0; i-- {
|
||||
captureVersion := regexp.MustCompile(`(\d+\.\d+\.\d+-\d+pre)`)
|
||||
if captureVersion.MatchString(files[i]) {
|
||||
latestVersion = captureVersion.FindString(files[i])
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return latestVersion, nil
|
||||
}
|
||||
|
||||
// downloadFile downloads an object to a file.
|
||||
func (client *Client) downloadFile(ctx context.Context, bucket *storage.BucketHandle, objectName, destFileName string) error {
|
||||
if bucket == nil {
|
||||
return ErrorNilBucket
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithTimeout(ctx, time.Second*10)
|
||||
defer cancel()
|
||||
|
||||
// nolint:gosec
|
||||
f, err := os.Create(destFileName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("os.Create: %v", err)
|
||||
}
|
||||
|
||||
rc, err := bucket.Object(objectName).NewReader(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Object(%q).NewReader: %v", objectName, err)
|
||||
}
|
||||
defer func() {
|
||||
if err := rc.Close(); err != nil {
|
||||
log.Println("failed to close reader", "err", err)
|
||||
}
|
||||
}()
|
||||
|
||||
if _, err := io.Copy(f, rc); err != nil {
|
||||
return fmt.Errorf("io.Copy: %v", err)
|
||||
}
|
||||
|
||||
if err = f.Close(); err != nil {
|
||||
return fmt.Errorf("f.Close: %v", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// asChunks will split the supplied []File into slices with a max size of `chunkSize`
|
||||
// []string{"a", "b", "c"}, 1 => [][]string{[]string{"a"}, []string{"b"}, []string{"c"}}
|
||||
// []string{"a", "b", "c"}, 2 => [][]string{[]string{"a", "b"}, []string{"c"}}.
|
||||
func asChunks(files []File, chunkSize int) [][]File {
|
||||
var fileChunks [][]File
|
||||
|
||||
if len(files) == 0 {
|
||||
return [][]File{}
|
||||
}
|
||||
|
||||
if len(files) > chunkSize && chunkSize > 0 {
|
||||
for i := 0; i < len(files); i += chunkSize {
|
||||
end := i + chunkSize
|
||||
|
||||
if end > len(files) {
|
||||
end = len(files)
|
||||
}
|
||||
fileChunks = append(fileChunks, files[i:end])
|
||||
}
|
||||
} else {
|
||||
fileChunks = [][]File{files}
|
||||
}
|
||||
return fileChunks
|
||||
}
|
||||
159
pkg/build/gcloud/storage/gsutil_test.go
Normal file
159
pkg/build/gcloud/storage/gsutil_test.go
Normal file
@@ -0,0 +1,159 @@
|
||||
package storage
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_asChunks(t *testing.T) {
|
||||
type args struct {
|
||||
files []File
|
||||
chunkSize int
|
||||
}
|
||||
tcs := []struct {
|
||||
name string
|
||||
args args
|
||||
expected [][]File
|
||||
}{
|
||||
{
|
||||
name: "Happy path #1",
|
||||
args: args{
|
||||
files: []File{
|
||||
{FullPath: "/a"},
|
||||
{FullPath: "/b"},
|
||||
{FullPath: "/c"},
|
||||
{FullPath: "/1"},
|
||||
{FullPath: "/2"},
|
||||
{FullPath: "/3"},
|
||||
},
|
||||
chunkSize: 5,
|
||||
},
|
||||
expected: [][]File{
|
||||
{{FullPath: "/a"}, {FullPath: "/b"}, {FullPath: "/c"}, {FullPath: "/1"}, {FullPath: "/2"}},
|
||||
{{FullPath: "/3"}},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Happy path #2",
|
||||
args: args{
|
||||
files: []File{
|
||||
{FullPath: "/a"},
|
||||
{FullPath: "/b"},
|
||||
{FullPath: "/c"},
|
||||
{FullPath: "/1"},
|
||||
{FullPath: "/2"},
|
||||
{FullPath: "/3"},
|
||||
},
|
||||
chunkSize: 2,
|
||||
},
|
||||
expected: [][]File{
|
||||
{{FullPath: "/a"}, {FullPath: "/b"}},
|
||||
{{FullPath: "/c"}, {FullPath: "/1"}},
|
||||
{{FullPath: "/2"}, {FullPath: "/3"}},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Happy path #3",
|
||||
args: args{
|
||||
files: []File{
|
||||
{FullPath: "/a"},
|
||||
{FullPath: "/b"},
|
||||
{FullPath: "/c"},
|
||||
},
|
||||
chunkSize: 1,
|
||||
},
|
||||
expected: [][]File{
|
||||
{{FullPath: "/a"}},
|
||||
{{FullPath: "/b"}},
|
||||
{{FullPath: "/c"}},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "A chunkSize with 0 value returns the input as a single chunk",
|
||||
args: args{
|
||||
files: []File{
|
||||
{FullPath: "/a"},
|
||||
{FullPath: "/b"},
|
||||
{FullPath: "/c"},
|
||||
},
|
||||
chunkSize: 0,
|
||||
},
|
||||
expected: [][]File{
|
||||
{
|
||||
{FullPath: "/a"},
|
||||
{FullPath: "/b"},
|
||||
{FullPath: "/c"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "A chunkSize with negative value returns the input as a single chunk",
|
||||
args: args{
|
||||
files: []File{
|
||||
{FullPath: "/a"},
|
||||
{FullPath: "/b"},
|
||||
{FullPath: "/c"},
|
||||
},
|
||||
chunkSize: -1,
|
||||
},
|
||||
expected: [][]File{
|
||||
{
|
||||
{FullPath: "/a"},
|
||||
{FullPath: "/b"},
|
||||
{FullPath: "/c"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "A chunkSize greater than the size on input returns the input as a single chunk",
|
||||
args: args{
|
||||
files: []File{
|
||||
{FullPath: "/a"},
|
||||
{FullPath: "/b"},
|
||||
{FullPath: "/c"},
|
||||
},
|
||||
chunkSize: 5,
|
||||
},
|
||||
expected: [][]File{
|
||||
{
|
||||
{FullPath: "/a"},
|
||||
{FullPath: "/b"},
|
||||
{FullPath: "/c"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "A chunkSize equal the size on input returns the input as a single chunk",
|
||||
args: args{
|
||||
files: []File{
|
||||
{FullPath: "/a"},
|
||||
{FullPath: "/b"},
|
||||
{FullPath: "/c"},
|
||||
},
|
||||
chunkSize: 3,
|
||||
},
|
||||
expected: [][]File{
|
||||
{
|
||||
{FullPath: "/a"},
|
||||
{FullPath: "/b"},
|
||||
{FullPath: "/c"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "An empty input returns empty chunks",
|
||||
args: args{
|
||||
files: []File{},
|
||||
chunkSize: 3,
|
||||
},
|
||||
expected: [][]File{},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
result := asChunks(tc.args.files, tc.args.chunkSize)
|
||||
require.Equal(t, tc.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
25
pkg/build/git/git.go
Normal file
25
pkg/build/git/git.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package git
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
const (
	// MainBranch is the name of the repository's default branch.
	MainBranch = "main"
	// HomeDir is the working directory used for repository operations.
	// NOTE(review): "." means "current directory" — confirm callers expect a
	// relative path here.
	HomeDir = "."
	// RepoOwner is the GitHub organization owning the repositories below.
	RepoOwner = "grafana"
	// OSSRepo is the open-source Grafana repository name.
	OSSRepo = "grafana"
	// EnterpriseRepo is the Grafana Enterprise repository name.
	EnterpriseRepo = "grafana-enterprise"
	// EnterpriseCheckName is the display name of the downstream enterprise check.
	EnterpriseCheckName = "Grafana Enterprise"
	// EnterpriseCheckDescription describes the downstream enterprise check.
	EnterpriseCheckDescription = "Downstream tests to ensure that your changes are compatible with Grafana Enterprise"
)
|
||||
|
||||
// PRCheckRegexp returns the regular expression that recognizes enterprise
// PR-check branch names of the form "prc-<pr>-<commit>/<branch>".
// Capture groups: 1 = PR number, 2 = commit, 3 = source branch name.
// It panics if the pattern fails to compile (a programming error).
func PRCheckRegexp() *regexp.Regexp {
	const pattern = `^prc-([0-9]+)-([A-Za-z0-9]+)\/(.+)$`

	compiled, err := regexp.Compile(pattern)
	if err != nil {
		panic(fmt.Sprintf("Failed to compile regexp: %s", err))
	}

	return compiled
}
|
||||
56
pkg/build/git/git_test.go
Normal file
56
pkg/build/git/git_test.go
Normal file
@@ -0,0 +1,56 @@
|
||||
package git_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/git"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestPRCheckRegexp(t *testing.T) {
|
||||
type match struct {
|
||||
String string
|
||||
Commit string
|
||||
Branch string
|
||||
PR string
|
||||
}
|
||||
|
||||
var (
|
||||
shouldMatch = []match{
|
||||
{
|
||||
String: "prc-1-a1b2c3d4/branch-name",
|
||||
Branch: "branch-name",
|
||||
Commit: "a1b2c3d4",
|
||||
PR: "1",
|
||||
},
|
||||
{
|
||||
String: "prc-111-a1b2c3d4/branch/name",
|
||||
Branch: "branch/name",
|
||||
Commit: "a1b2c3d4",
|
||||
PR: "111",
|
||||
},
|
||||
{
|
||||
String: "prc-102930122-a1b2c3d4/branch-name",
|
||||
Branch: "branch-name",
|
||||
Commit: "a1b2c3d4",
|
||||
PR: "102930122",
|
||||
},
|
||||
}
|
||||
|
||||
shouldNotMatch = []string{"prc-a/branch", "km/test", "test", "prc", "prc/test", "price"}
|
||||
)
|
||||
|
||||
regex := git.PRCheckRegexp()
|
||||
|
||||
for _, v := range shouldMatch {
|
||||
assert.Truef(t, regex.MatchString(v.String), "regex '%s' should match %s", regex.String(), v)
|
||||
m := regex.FindStringSubmatch(v.String)
|
||||
assert.Equal(t, m[1], v.PR)
|
||||
assert.Equal(t, m[2], v.Commit)
|
||||
assert.Equal(t, m[3], v.Branch)
|
||||
}
|
||||
|
||||
for _, v := range shouldNotMatch {
|
||||
assert.False(t, regex.MatchString(v), "regex '%s' should not match %s", regex.String(), v)
|
||||
}
|
||||
}
|
||||
124
pkg/build/golangutils/build.go
Normal file
124
pkg/build/golangutils/build.go
Normal file
@@ -0,0 +1,124 @@
|
||||
package golangutils
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"os/exec"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
)
|
||||
|
||||
// BuildOpts describes a single "go build" invocation: target platform,
// toolchain, linker flags, working directory, and I/O wiring.
type BuildOpts struct {
	// Package refers to the path to the `main` package containing `func main`
	Package string

	// Output is used as the -o argument in the go build command
	Output string

	// Workdir should define some place in the module where the package path resolves.
	// Go commands need to be ran inside a the Go module directory.
	Workdir string

	// GoOS / GoArch select the target platform (GOOS / GOARCH).
	GoOS   config.OS
	GoArch config.Architecture
	// GoArm selects the ARM revision (GOARM) for 32-bit ARM targets.
	GoArm string
	// Go386 selects the 386 floating-point mode (GO386).
	Go386 string
	// CC is the C cross-compiler used when cgo is enabled.
	CC string
	// LibC labels the C library variant (e.g. musl) for naming purposes.
	LibC string

	// CGoEnabled toggles CGO_ENABLED=1.
	CGoEnabled bool
	// CGoCFlags is passed through as CGO_CFLAGS.
	CGoCFlags string

	// LdFlags are joined by a space character and provided to the -ldflags argument.
	// A valid element here would be `-X 'main.version=1.0.0'`.
	LdFlags []string

	// Stdout/Stderr/Stdin are wired to the spawned go build process.
	Stdout io.ReadWriter
	Stderr io.ReadWriter
	Stdin  io.ReadWriter

	// ExtraEnv allows consumers to provide extra env args that are not defined above.
	// A single element should be formatted using like so: {NAME}={VALUE}. Example: GOOS=linux.
	ExtraEnv []string

	// ExtraArgs allows consumers to provide extra arguments that are not defined above.
	// Flag names and values should be two separate elements.
	// These flags will be appended to the command arguments before the package path in "go build".
	ExtraArgs []string
}
|
||||
|
||||
// Env constructs a list of key/value pairs for setting a build command's environment.
|
||||
// Should we consider using something to unmarshal the struct to env?
|
||||
func (opts BuildOpts) Env() []string {
|
||||
env := []string{}
|
||||
if opts.CGoEnabled {
|
||||
env = append(env, "CGO_ENABLED=1")
|
||||
}
|
||||
|
||||
if opts.GoOS != "" {
|
||||
env = append(env, fmt.Sprintf("GOOS=%s", opts.GoOS))
|
||||
}
|
||||
|
||||
if opts.GoArch != "" {
|
||||
env = append(env, fmt.Sprintf("GOARCH=%s", opts.GoArch))
|
||||
}
|
||||
|
||||
if opts.CC != "" {
|
||||
env = append(env, fmt.Sprintf("CC=%s", opts.CC))
|
||||
}
|
||||
|
||||
if opts.CGoCFlags != "" {
|
||||
env = append(env, fmt.Sprintf("CGO_CFLAGS=%s", opts.CGoCFlags))
|
||||
}
|
||||
|
||||
if opts.GoArm != "" {
|
||||
env = append(env, fmt.Sprintf("GOARM=%s", opts.GoArm))
|
||||
}
|
||||
|
||||
if opts.ExtraEnv != nil {
|
||||
return append(opts.ExtraEnv, env...)
|
||||
}
|
||||
|
||||
return env
|
||||
}
|
||||
|
||||
// Args constructs a list of flags and values for use with the exec.Command type when running "go build".
|
||||
func (opts BuildOpts) Args() []string {
|
||||
args := []string{}
|
||||
|
||||
if opts.LdFlags != nil {
|
||||
args = append(args, "-ldflags", strings.Join(opts.LdFlags, " "))
|
||||
}
|
||||
|
||||
if opts.Output != "" {
|
||||
args = append(args, "-o", opts.Output)
|
||||
}
|
||||
|
||||
if opts.ExtraArgs != nil {
|
||||
args = append(args, opts.ExtraArgs...)
|
||||
}
|
||||
|
||||
args = append(args, opts.Package)
|
||||
|
||||
return args
|
||||
}
|
||||
|
||||
// Build runs the go build process in the current shell given the opts.
|
||||
// This function will panic if no Stdout/Stderr/Stdin is provided in the opts.
|
||||
func RunBuild(ctx context.Context, opts BuildOpts) error {
|
||||
env := opts.Env()
|
||||
args := append([]string{"build"}, opts.Args()...)
|
||||
// Ignore gosec G304 as this function is only used in the build process.
|
||||
//nolint:gosec
|
||||
cmd := exec.CommandContext(ctx, "go", args...)
|
||||
cmd.Env = env
|
||||
|
||||
cmd.Stdout = opts.Stdout
|
||||
cmd.Stderr = opts.Stderr
|
||||
cmd.Stdin = opts.Stdin
|
||||
cmd.Dir = opts.Workdir
|
||||
|
||||
return cmd.Run()
|
||||
}
|
||||
2
pkg/build/golangutils/doc.go
Normal file
2
pkg/build/golangutils/doc.go
Normal file
@@ -0,0 +1,2 @@
|
||||
// Package golangutils holds utility functions, wrappers, and types for building Go binaries for Grafana.
|
||||
package golangutils
|
||||
123
pkg/build/grafana/build.go
Normal file
123
pkg/build/grafana/build.go
Normal file
@@ -0,0 +1,123 @@
|
||||
package grafana
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/cryptoutil"
|
||||
"github.com/grafana/grafana/pkg/build/golangutils"
|
||||
)
|
||||
|
||||
// binaries are the main-package names (under pkg/cmd) built for each variant.
var binaries = []string{"grafana", "grafana-server", "grafana-cli"}

const (
	// SuffixEnterprise2 is appended to the binary folder name for enterprise2 builds.
	SuffixEnterprise2 = "-enterprise2"
)

const (
	// ExtensionExe is the executable file extension used for Windows targets.
	ExtensionExe = ".exe"
)
|
||||
|
||||
func GrafanaLDFlags(version string, r config.Revision) []string {
|
||||
return []string{
|
||||
"-w",
|
||||
fmt.Sprintf("-X main.version=%s", version),
|
||||
fmt.Sprintf("-X main.commit=%s", r.SHA256),
|
||||
fmt.Sprintf("-X main.buildstamp=%d", r.Timestamp),
|
||||
fmt.Sprintf("-X main.buildBranch=%s", r.Branch),
|
||||
}
|
||||
}
|
||||
|
||||
// BinaryFolder returns the path to where the Grafana binary is build given the provided arguments.
// The result has the shape "<os>-<arch>[-<libc>][-enterprise2]".
// NOTE(review): for Windows targets ".exe" is appended to this *folder* name,
// while BuildGrafanaBinary also appends ".exe" to the binary name — confirm
// the folder suffix is intentional.
func BinaryFolder(edition config.Edition, args BuildArgs) string {
	sfx := ""
	if edition == config.EditionEnterprise2 {
		sfx = SuffixEnterprise2
	}

	// 32-bit ARM encodes the ARM revision in the arch segment, e.g. "armv7".
	arch := string(args.GoArch)
	if args.GoArch == config.ArchARM {
		arch = string(args.GoArch) + "v" + args.GoArm
	}

	format := fmt.Sprintf("%s-%s", args.GoOS, arch)
	if args.LibC != "" {
		format += fmt.Sprintf("-%s", args.LibC)
	}
	format += sfx

	if args.GoOS == config.OSWindows {
		format += ExtensionExe
	}

	return format
}
|
||||
|
||||
func GrafanaDescriptor(opts golangutils.BuildOpts) string {
|
||||
libcPart := ""
|
||||
if opts.LibC != "" {
|
||||
libcPart = fmt.Sprintf("/%s", opts.LibC)
|
||||
}
|
||||
arch := string(opts.GoArch)
|
||||
if opts.GoArch == config.ArchARM {
|
||||
arch = string(opts.GoArch) + "v" + opts.GoArm
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s/%s%s", opts.GoOS, arch, libcPart)
|
||||
}
|
||||
|
||||
// BuildGrafanaBinary builds a certain binary according to certain parameters.
// It resolves the current git revision for version stamping, removes any stale
// output (and its .md5), runs "go build", and finally writes an MD5 checksum
// next to the binary for automatic upgrades.
func BuildGrafanaBinary(ctx context.Context, name, version string, args BuildArgs, edition config.Edition) error {
	opts := args.BuildOpts
	// Inherit the caller's environment; BuildOpts.Env() values are merged with it.
	opts.ExtraEnv = os.Environ()

	revision, err := config.GrafanaRevision(ctx, opts.Workdir)
	if err != nil {
		return err
	}

	folder := BinaryFolder(edition, args)

	if opts.GoOS == config.OSWindows {
		name += ExtensionExe
	}

	// Output lands at <workdir>/bin/<folder>/<name>.
	binary := filepath.Join(opts.Workdir, "bin", folder, name)
	opts.Output = binary

	// Remove stale outputs so a failed build cannot leave old artifacts behind.
	if err := os.RemoveAll(binary); err != nil {
		return fmt.Errorf("failed to remove %q: %w", binary, err)
	}

	if err := os.RemoveAll(binary + ".md5"); err != nil {
		return fmt.Errorf("failed to remove %q: %w", binary+".md5", err)
	}

	descriptor := GrafanaDescriptor(opts)

	log.Printf("Building %q for %s\nwith env: %v", binary, descriptor, opts.Env())

	// Version/commit/branch stamping flags come after the variant's own ldflags.
	opts.LdFlags = append(args.LdFlags, GrafanaLDFlags(version, revision)...)

	if edition == config.EditionEnterprise2 {
		opts.ExtraArgs = []string{"-tags=pro"}
	}

	log.Printf("Running command 'go %s'", opts.Args())

	if err := golangutils.RunBuild(ctx, opts); err != nil {
		return err
	}

	// Create an MD5 checksum of the binary, to be included in the archive for
	// automatic upgrades.
	if err := cryptoutil.MD5File(binary); err != nil {
		return err
	}

	return nil
}
|
||||
160
pkg/build/grafana/variant.go
Normal file
160
pkg/build/grafana/variant.go
Normal file
@@ -0,0 +1,160 @@
|
||||
package grafana
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/compilers"
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/golangutils"
|
||||
)
|
||||
|
||||
// BuildArgs represent the build parameters that define the "go build" behavior of a single variant.
// These arguments are applied as environment variables and arguments to the "go build" command.
type BuildArgs struct {
	golangutils.BuildOpts
	// DebArch is the Debian package architecture label for this variant.
	DebArch config.Architecture
	// RPMArch is the RPM package architecture label for this variant.
	RPMArch config.Architecture
}

// BuildVariantOpts are the inputs to BuildVariant: which variant/edition to
// build, the version to stamp, and where the Grafana checkout lives.
type BuildVariantOpts struct {
	Variant config.Variant
	Edition config.Edition

	Version    string
	GrafanaDir string
}
|
||||
|
||||
// BuildVariant builds a certain variant of the grafana-server and grafana-cli binaries sequentially.
|
||||
func BuildVariant(ctx context.Context, opts BuildVariantOpts) error {
|
||||
grafanaDir, err := filepath.Abs(opts.GrafanaDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var (
|
||||
args = VariantBuildArgs(opts.Variant)
|
||||
)
|
||||
|
||||
for _, binary := range binaries {
|
||||
// Note that for Golang cmd paths we must use the relative path and the Linux file separators (/) even for Windows users.
|
||||
var (
|
||||
pkg = fmt.Sprintf("./pkg/cmd/%s", binary)
|
||||
stdout = bytes.NewBuffer(nil)
|
||||
stderr = bytes.NewBuffer(nil)
|
||||
)
|
||||
|
||||
args.Workdir = grafanaDir
|
||||
args.Stdout = stdout
|
||||
args.Stderr = stderr
|
||||
args.Package = pkg
|
||||
|
||||
if err := BuildGrafanaBinary(ctx, binary, opts.Version, args, opts.Edition); err != nil {
|
||||
return fmt.Errorf("failed to build %s for %s: %w\nstdout: %s\nstderr: %s", pkg, opts.Variant, err, stdout.String(), stderr.String())
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ldFlagsStatic forces fully static linking; applied to the musl-based variants.
var ldFlagsStatic = []string{"-linkmode=external", "-extldflags=-static"}

// variantArgs maps each supported build variant to its "go build" parameters:
// target OS/arch, cross-compiler, optional libc label, and the matching
// deb/rpm package architecture labels (left empty where no package is built).
var variantArgs = map[config.Variant]BuildArgs{
	// 32-bit ARMv6 (e.g. first-generation Raspberry Pi), glibc.
	config.VariantArmV6: {
		BuildOpts: golangutils.BuildOpts{
			GoOS:       config.OSLinux,
			CGoEnabled: true,
			GoArch:     config.ArchARM,
			GoArm:      "6",
			CC:         compilers.ArmV6,
		},
		DebArch: config.ArchARMHF,
	},
	// 32-bit ARMv7, glibc.
	config.VariantArmV7: {
		BuildOpts: golangutils.BuildOpts{
			GoOS:       config.OSLinux,
			CGoEnabled: true,
			GoArch:     config.ArchARM,
			GoArm:      "7",
			CC:         compilers.Armv7,
		},
		DebArch: config.ArchARMHF,
		RPMArch: config.ArchARMHFP,
	},
	// 32-bit ARMv7, statically linked against musl.
	config.VariantArmV7Musl: {
		BuildOpts: golangutils.BuildOpts{
			GoOS:       config.OSLinux,
			CGoEnabled: true,
			GoArch:     config.ArchARM,
			GoArm:      "7",
			LibC:       config.LibCMusl,
			CC:         compilers.Armv7Musl,
			LdFlags:    ldFlagsStatic,
		},
	},
	// 64-bit ARM, glibc.
	config.VariantArm64: {
		BuildOpts: golangutils.BuildOpts{
			GoOS:       config.OSLinux,
			CGoEnabled: true,
			GoArch:     config.ArchARM64,
			CC:         compilers.Arm64,
		},
		DebArch: config.ArchARM64,
		// RPM uses "aarch64" rather than "arm64" for this architecture.
		RPMArch: "aarch64",
	},
	// 64-bit ARM, statically linked against musl.
	config.VariantArm64Musl: {
		BuildOpts: golangutils.BuildOpts{
			GoOS:       config.OSLinux,
			GoArch:     config.ArchARM64,
			CGoEnabled: true,
			CC:         compilers.Arm64Musl,
			LibC:       config.LibCMusl,
			LdFlags:    ldFlagsStatic,
		},
	},
	// macOS on amd64.
	config.VariantDarwinAmd64: {
		BuildOpts: golangutils.BuildOpts{
			GoOS:       config.OSDarwin,
			CGoEnabled: true,
			GoArch:     config.ArchAMD64,
			CC:         compilers.Osx64,
		},
	},
	// Windows on amd64; CGO_CFLAGS targets the Windows 7 API level.
	config.VariantWindowsAmd64: {
		BuildOpts: golangutils.BuildOpts{
			GoOS:       config.OSWindows,
			GoArch:     config.ArchAMD64,
			CC:         compilers.Win64,
			CGoEnabled: true,
			CGoCFlags:  "-D_WIN32_WINNT=0x0601",
		},
	},
	// Linux on amd64, glibc.
	config.VariantLinuxAmd64: {
		BuildOpts: golangutils.BuildOpts{
			GoOS:   config.OSLinux,
			GoArch: config.ArchAMD64,
			CC:     compilers.LinuxX64,
		},
		DebArch: config.ArchAMD64,
		RPMArch: config.ArchAMD64,
	},
	// Linux on amd64, statically linked against musl.
	config.VariantLinuxAmd64Musl: {
		BuildOpts: golangutils.BuildOpts{
			GoOS:    config.OSLinux,
			GoArch:  config.ArchAMD64,
			CC:      compilers.LinuxX64Musl,
			LibC:    config.LibCMusl,
			LdFlags: ldFlagsStatic,
		},
	},
}
|
||||
|
||||
func VariantBuildArgs(v config.Variant) BuildArgs {
|
||||
if val, ok := variantArgs[v]; ok {
|
||||
return val
|
||||
}
|
||||
|
||||
return BuildArgs{}
|
||||
}
|
||||
140
pkg/build/packaging/artifacts.go
Normal file
140
pkg/build/packaging/artifacts.go
Normal file
@@ -0,0 +1,140 @@
|
||||
package packaging
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
)
|
||||
|
||||
const ReleaseFolder = "release"
|
||||
const MainFolder = "main"
|
||||
const EnterpriseSfx = "-enterprise"
|
||||
const CacheSettings = "Cache-Control:public, max-age="
|
||||
|
||||
type buildArtifact struct {
|
||||
Os string
|
||||
Arch string
|
||||
urlPostfix string
|
||||
packagePostfix string
|
||||
}
|
||||
|
||||
type PublishConfig struct {
|
||||
config.Config
|
||||
|
||||
Edition config.Edition
|
||||
ReleaseMode config.ReleaseMode
|
||||
GrafanaAPIKey string
|
||||
WhatsNewURL string
|
||||
ReleaseNotesURL string
|
||||
DryRun bool
|
||||
TTL string
|
||||
SimulateRelease bool
|
||||
}
|
||||
|
||||
// Package-format identifiers used in buildArtifact.Os.
const rhelOS = "rhel"
const debOS = "deb"
|
||||
|
||||
func (t buildArtifact) GetURL(baseArchiveURL string, cfg PublishConfig) string {
|
||||
rev := ""
|
||||
prefix := "-"
|
||||
if t.Os == debOS {
|
||||
prefix = "_"
|
||||
} else if t.Os == rhelOS {
|
||||
rev = "-1"
|
||||
}
|
||||
|
||||
version := cfg.Version
|
||||
verComponents := strings.Split(version, "-")
|
||||
if len(verComponents) > 2 {
|
||||
panic(fmt.Sprintf("Version string contains more than one hyphen: %q", version))
|
||||
}
|
||||
|
||||
switch t.Os {
|
||||
case debOS, rhelOS:
|
||||
if len(verComponents) > 1 {
|
||||
// With Debian and RPM packages, it's customary to prefix any pre-release component with a ~, since this
|
||||
// is considered of lower lexical value than the empty character, and this way pre-release versions are
|
||||
// considered to be of a lower version than the final version (which lacks this suffix).
|
||||
version = fmt.Sprintf("%s~%s", verComponents[0], verComponents[1])
|
||||
}
|
||||
}
|
||||
|
||||
// https://dl.grafana.com/oss/main/grafana_8.5.0~54094pre_armhf.deb: 404 Not Found
|
||||
url := fmt.Sprintf("%s%s%s%s%s%s", baseArchiveURL, t.packagePostfix, prefix, version, rev, t.urlPostfix)
|
||||
return url
|
||||
}
|
||||
|
||||
var ArtifactConfigs = []buildArtifact{
|
||||
{
|
||||
Os: debOS,
|
||||
Arch: "arm64",
|
||||
urlPostfix: "_arm64.deb",
|
||||
},
|
||||
{
|
||||
Os: rhelOS,
|
||||
Arch: "arm64",
|
||||
urlPostfix: ".aarch64.rpm",
|
||||
},
|
||||
{
|
||||
Os: "linux",
|
||||
Arch: "arm64",
|
||||
urlPostfix: ".linux-arm64.tar.gz",
|
||||
},
|
||||
{
|
||||
Os: debOS,
|
||||
Arch: "armv7",
|
||||
urlPostfix: "_armhf.deb",
|
||||
},
|
||||
{
|
||||
Os: debOS,
|
||||
Arch: "armv6",
|
||||
packagePostfix: "-rpi",
|
||||
urlPostfix: "_armhf.deb",
|
||||
},
|
||||
{
|
||||
Os: rhelOS,
|
||||
Arch: "armv7",
|
||||
urlPostfix: ".armhfp.rpm",
|
||||
},
|
||||
{
|
||||
Os: "linux",
|
||||
Arch: "armv6",
|
||||
urlPostfix: ".linux-armv6.tar.gz",
|
||||
},
|
||||
{
|
||||
Os: "linux",
|
||||
Arch: "armv7",
|
||||
urlPostfix: ".linux-armv7.tar.gz",
|
||||
},
|
||||
{
|
||||
Os: "darwin",
|
||||
Arch: "amd64",
|
||||
urlPostfix: ".darwin-amd64.tar.gz",
|
||||
},
|
||||
{
|
||||
Os: "deb",
|
||||
Arch: "amd64",
|
||||
urlPostfix: "_amd64.deb",
|
||||
},
|
||||
{
|
||||
Os: rhelOS,
|
||||
Arch: "amd64",
|
||||
urlPostfix: ".x86_64.rpm",
|
||||
},
|
||||
{
|
||||
Os: "linux",
|
||||
Arch: "amd64",
|
||||
urlPostfix: ".linux-amd64.tar.gz",
|
||||
},
|
||||
{
|
||||
Os: "win",
|
||||
Arch: "amd64",
|
||||
urlPostfix: ".windows-amd64.zip",
|
||||
},
|
||||
{
|
||||
Os: "win-installer",
|
||||
Arch: "amd64",
|
||||
urlPostfix: ".windows-amd64.msi",
|
||||
},
|
||||
}
|
||||
246
pkg/build/packaging/deb.go
Normal file
246
pkg/build/packaging/deb.go
Normal file
@@ -0,0 +1,246 @@
|
||||
package packaging
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/fsutil"
|
||||
"github.com/grafana/grafana/pkg/infra/fs"
|
||||
)
|
||||
|
||||
func writeAptlyConf(dbDir, repoDir string) error {
|
||||
aptlyConf := fmt.Sprintf(`{
|
||||
"rootDir": "%s",
|
||||
"downloadConcurrency": 4,
|
||||
"downloadSpeedLimit": 0,
|
||||
"architectures": [],
|
||||
"dependencyFollowSuggests": false,
|
||||
"dependencyFollowRecommends": false,
|
||||
"dependencyFollowAllVariants": false,
|
||||
"dependencyFollowSource": false,
|
||||
"dependencyVerboseResolve": false,
|
||||
"gpgDisableSign": false,
|
||||
"gpgDisableVerify": false,
|
||||
"gpgProvider": "gpg2",
|
||||
"downloadSourcePackages": false,
|
||||
"skipLegacyPool": true,
|
||||
"ppaDistributorID": "ubuntu",
|
||||
"ppaCodename": "",
|
||||
"skipContentsPublishing": false,
|
||||
"FileSystemPublishEndpoints": {
|
||||
"repo": {
|
||||
"rootDir": "%s",
|
||||
"linkMethod": "copy"
|
||||
}
|
||||
},
|
||||
"S3PublishEndpoints": {},
|
||||
"SwiftPublishEndpoints": {}
|
||||
}
|
||||
`, dbDir, repoDir)
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return os.WriteFile(filepath.Join(home, ".aptly.conf"), []byte(aptlyConf), 0600)
|
||||
}
|
||||
|
||||
// downloadDebs downloads Deb packages.
|
||||
func downloadDebs(cfg PublishConfig, workDir string) error {
|
||||
if cfg.Bucket == "" {
|
||||
panic("cfg.Bucket has to be set")
|
||||
}
|
||||
if !strings.HasSuffix(workDir, string(filepath.Separator)) {
|
||||
workDir += string(filepath.Separator)
|
||||
}
|
||||
|
||||
var version string
|
||||
if cfg.ReleaseMode.Mode == config.TagMode {
|
||||
if cfg.ReleaseMode.IsBeta {
|
||||
version = strings.ReplaceAll(cfg.Version, "-", "~")
|
||||
} else {
|
||||
version = cfg.Version
|
||||
}
|
||||
}
|
||||
if version == "" {
|
||||
panic(fmt.Sprintf("Unrecognized version mode %s", cfg.ReleaseMode.Mode))
|
||||
}
|
||||
|
||||
var sfx string
|
||||
switch cfg.Edition {
|
||||
case config.EditionOSS:
|
||||
case config.EditionEnterprise:
|
||||
sfx = EnterpriseSfx
|
||||
default:
|
||||
return fmt.Errorf("unrecognized edition %q", cfg.Edition)
|
||||
}
|
||||
|
||||
u := fmt.Sprintf("gs://%s/%s/%s/grafana%s_%s_*.deb*", cfg.Bucket,
|
||||
strings.ToLower(string(cfg.Edition)), ReleaseFolder, sfx, version)
|
||||
log.Printf("Downloading Deb packages %q...\n", u)
|
||||
args := []string{
|
||||
"-m",
|
||||
"cp",
|
||||
u,
|
||||
workDir,
|
||||
}
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("gsutil", args...)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("failed to download Deb packages %q: %w\n%s", u, err, output)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdateDebRepo updates the Debian repository with the new release.
|
||||
func UpdateDebRepo(cfg PublishConfig, workDir string) error {
|
||||
if cfg.ReleaseMode.Mode != config.TagMode {
|
||||
panic(fmt.Sprintf("Unsupported version mode: %s", cfg.ReleaseMode.Mode))
|
||||
}
|
||||
|
||||
if cfg.ReleaseMode.IsTest {
|
||||
if cfg.Config.DebDBBucket == DefaultDebDBBucket {
|
||||
return fmt.Errorf("in test-release mode, the default Deb DB bucket shouldn't be used")
|
||||
}
|
||||
if cfg.Config.DebRepoBucket == DefaultDebRepoBucket {
|
||||
return fmt.Errorf("in test-release mode, the default Deb repo bucket shouldn't be used")
|
||||
}
|
||||
}
|
||||
|
||||
if err := downloadDebs(cfg, workDir); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
repoName := "grafana"
|
||||
if cfg.ReleaseMode.IsBeta {
|
||||
repoName = "beta"
|
||||
}
|
||||
|
||||
repoRoot, err := fsutil.CreateTempDir("deb-repo")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := os.RemoveAll(repoRoot); err != nil {
|
||||
log.Printf("Failed to remove temporary directory %q: %s\n", repoRoot, err.Error())
|
||||
}
|
||||
}()
|
||||
|
||||
dbDir := filepath.Join(repoRoot, "db")
|
||||
repoDir := filepath.Join(repoRoot, "repo")
|
||||
tmpDir := filepath.Join(repoRoot, "tmp")
|
||||
for _, dpath := range []string{dbDir, repoDir, tmpDir} {
|
||||
if err := os.MkdirAll(dpath, 0750); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := writeAptlyConf(dbDir, repoDir); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Download the Debian repo database
|
||||
u := fmt.Sprintf("gs://%s/%s", cfg.DebDBBucket, strings.ToLower(string(cfg.Edition)))
|
||||
log.Printf("Downloading Debian repo database from %s...\n", u)
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("gsutil", "-m", "rsync", "-r", "-d", u, dbDir)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("failed to download Debian repo database: %w\n%s", err, output)
|
||||
}
|
||||
|
||||
if err := addPkgsToRepo(cfg, workDir, tmpDir, repoName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Println("Updating local Debian package repository...")
|
||||
// Update published local repository. This assumes that there exists already a local, published repo.
|
||||
for _, tp := range []string{"stable", "beta"} {
|
||||
passArg := fmt.Sprintf("-passphrase-file=%s", cfg.GPGPassPath)
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("aptly", "publish", "update", "-batch", passArg, "-force-overwrite", tp,
|
||||
"filesystem:repo:grafana")
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return cli.Exit(fmt.Sprintf("failed to update Debian %q repository: %s", tp, output), 1)
|
||||
}
|
||||
}
|
||||
|
||||
// Update database in GCS
|
||||
u = fmt.Sprintf("gs://%s/%s", cfg.DebDBBucket, strings.ToLower(string(cfg.Edition)))
|
||||
if cfg.DryRun {
|
||||
log.Printf("Simulating upload of Debian repo database to GCS (%s)\n", u)
|
||||
} else {
|
||||
log.Printf("Uploading Debian repo database to GCS (%s)...\n", u)
|
||||
//nolint:gosec
|
||||
cmd = exec.Command("gsutil", "-m", "rsync", "-r", "-d", dbDir, u)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return cli.Exit(fmt.Sprintf("failed to upload Debian repo database to GCS: %s", output), 1)
|
||||
}
|
||||
}
|
||||
|
||||
// Update metadata and binaries in repository bucket
|
||||
u = fmt.Sprintf("gs://%s/%s/deb", cfg.DebRepoBucket, strings.ToLower(string(cfg.Edition)))
|
||||
grafDir := filepath.Join(repoDir, "grafana")
|
||||
if cfg.DryRun {
|
||||
log.Printf("Simulating upload of Debian repo resources to GCS (%s)\n", u)
|
||||
} else {
|
||||
log.Printf("Uploading Debian repo resources to GCS (%s)...\n", u)
|
||||
//nolint:gosec
|
||||
cmd = exec.Command("gsutil", "-m", "rsync", "-r", "-d", grafDir, u)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return cli.Exit(fmt.Sprintf("failed to upload Debian repo resources to GCS: %s", output), 1)
|
||||
}
|
||||
allRepoResources := fmt.Sprintf("%s/**/*", u)
|
||||
log.Printf("Setting cache ttl for Debian repo resources on GCS (%s)...\n", allRepoResources)
|
||||
//nolint:gosec
|
||||
cmd = exec.Command("gsutil", "-m", "setmeta", "-h", CacheSettings+cfg.TTL, allRepoResources)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return cli.Exit(fmt.Sprintf("failed to set cache ttl for Debian repo resources on GCS: %s", output), 1)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func addPkgsToRepo(cfg PublishConfig, workDir, tmpDir, repoName string) error {
|
||||
var sfx string
|
||||
switch cfg.Edition {
|
||||
case config.EditionOSS:
|
||||
case config.EditionEnterprise:
|
||||
sfx = EnterpriseSfx
|
||||
default:
|
||||
return fmt.Errorf("unsupported edition %q", cfg.Edition)
|
||||
}
|
||||
|
||||
log.Printf("Adding packages to Debian %q repo...\n", repoName)
|
||||
// TODO: Be more specific about filename pattern
|
||||
debs, err := filepath.Glob(filepath.Join(workDir, fmt.Sprintf("grafana%s*.deb", sfx)))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, deb := range debs {
|
||||
basename := filepath.Base(deb)
|
||||
if strings.Contains(basename, "latest") {
|
||||
continue
|
||||
}
|
||||
|
||||
tgt := filepath.Join(tmpDir, basename)
|
||||
if err := fs.CopyFile(deb, tgt); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// XXX: Adds too many packages in enterprise (Arve: What does this mean exactly?)
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("aptly", "repo", "add", "-force-replace", repoName, tmpDir)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return cli.Exit(fmt.Sprintf("failed to add packages to local Debian repository: %s", output), 1)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
2
pkg/build/packaging/docs.go
Normal file
2
pkg/build/packaging/docs.go
Normal file
@@ -0,0 +1,2 @@
|
||||
// Package packaging holds functions and types for creating the tar.gz, deb, and rpm packages of Grafana.
|
||||
package packaging
|
||||
1
pkg/build/packaging/errors.go
Normal file
1
pkg/build/packaging/errors.go
Normal file
@@ -0,0 +1 @@
|
||||
package packaging
|
||||
1127
pkg/build/packaging/grafana.go
Normal file
1127
pkg/build/packaging/grafana.go
Normal file
File diff suppressed because it is too large
Load Diff
22
pkg/build/packaging/grafana_test.go
Normal file
22
pkg/build/packaging/grafana_test.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package packaging_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/packaging"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestPackageRegexp(t *testing.T) {
|
||||
t.Run("It should match enterprise2 packages", func(t *testing.T) {
|
||||
rgx := packaging.PackageRegexp(config.EditionEnterprise2)
|
||||
matches := []string{
|
||||
"grafana-enterprise2-1.2.3-4567pre.linux-amd64.tar.gz",
|
||||
"grafana-enterprise2-1.2.3-4567pre.linux-amd64.tar.gz.sha256",
|
||||
}
|
||||
for _, v := range matches {
|
||||
assert.Truef(t, rgx.MatchString(v), "'%s' should match regex '%s'", v, rgx.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
370
pkg/build/packaging/rpm.go
Normal file
370
pkg/build/packaging/rpm.go
Normal file
@@ -0,0 +1,370 @@
|
||||
package packaging
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
// Consider switching this over to a community fork unless there is
|
||||
// an option to move us away from OpenPGP.
|
||||
"golang.org/x/crypto/openpgp" //nolint:staticcheck
|
||||
"golang.org/x/crypto/openpgp/armor" //nolint:staticcheck
|
||||
"golang.org/x/crypto/openpgp/packet" //nolint:staticcheck
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/fsutil"
|
||||
"github.com/grafana/grafana/pkg/infra/fs"
|
||||
)
|
||||
|
||||
// UpdateRPMRepo updates the RPM repository with the new release.
|
||||
func UpdateRPMRepo(cfg PublishConfig, workDir string) error {
|
||||
if cfg.ReleaseMode.Mode != config.TagMode {
|
||||
panic(fmt.Sprintf("Unsupported version mode %s", cfg.ReleaseMode.Mode))
|
||||
}
|
||||
|
||||
if cfg.ReleaseMode.IsTest && cfg.Config.RPMRepoBucket == DefaultRPMRepoBucket {
|
||||
return fmt.Errorf("in test-release mode, the default RPM repo bucket shouldn't be used")
|
||||
}
|
||||
|
||||
if err := downloadRPMs(cfg, workDir); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
repoRoot, err := fsutil.CreateTempDir("rpm-repo")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := os.RemoveAll(repoRoot); err != nil {
|
||||
log.Printf("Failed to remove %q: %s\n", repoRoot, err.Error())
|
||||
}
|
||||
}()
|
||||
|
||||
repoName := "rpm"
|
||||
if cfg.ReleaseMode.IsBeta {
|
||||
repoName = "rpm-beta"
|
||||
}
|
||||
folderURI := fmt.Sprintf("gs://%s/%s/%s", cfg.RPMRepoBucket, strings.ToLower(string(cfg.Edition)), repoName)
|
||||
|
||||
// Download the RPM database
|
||||
log.Printf("Downloading RPM database from GCS (%s)...\n", folderURI)
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("gsutil", "-m", "rsync", "-r", "-d", folderURI, repoRoot)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("failed to download RPM database from GCS: %w\n%s", err, output)
|
||||
}
|
||||
|
||||
// Add the new release to the repo
|
||||
var sfx string
|
||||
switch cfg.Edition {
|
||||
case config.EditionOSS:
|
||||
case config.EditionEnterprise:
|
||||
sfx = EnterpriseSfx
|
||||
default:
|
||||
return fmt.Errorf("unsupported edition %q", cfg.Edition)
|
||||
}
|
||||
allRPMs, err := filepath.Glob(filepath.Join(workDir, fmt.Sprintf("grafana%s-*.rpm", sfx)))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to list RPMs in %q: %w", workDir, err)
|
||||
}
|
||||
rpms := []string{}
|
||||
for _, rpm := range allRPMs {
|
||||
if strings.Contains(rpm, "-latest") {
|
||||
continue
|
||||
}
|
||||
|
||||
rpms = append(rpms, rpm)
|
||||
}
|
||||
// XXX: What does the following comment mean?
|
||||
// adds to many files for enterprise
|
||||
for _, rpm := range rpms {
|
||||
if err := fs.CopyFile(rpm, filepath.Join(repoRoot, filepath.Base(rpm))); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
//nolint:gosec
|
||||
cmd = exec.Command("createrepo", repoRoot)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("failed to create repo at %q: %w\n%s", repoRoot, err, output)
|
||||
}
|
||||
|
||||
if err := signRPMRepo(repoRoot, cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Update the repo in GCS
|
||||
|
||||
// Sync packages first to avoid cache misses
|
||||
if cfg.DryRun {
|
||||
log.Printf("Simulating upload of RPMs to GCS (%s)\n", folderURI)
|
||||
} else {
|
||||
log.Printf("Uploading RPMs to GCS (%s)...\n", folderURI)
|
||||
args := []string{"-m", "cp"}
|
||||
args = append(args, rpms...)
|
||||
args = append(args, folderURI)
|
||||
//nolint:gosec
|
||||
cmd = exec.Command("gsutil", args...)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("failed to upload RPMs to GCS: %w\n%s", err, output)
|
||||
}
|
||||
}
|
||||
|
||||
if cfg.DryRun {
|
||||
log.Printf("Simulating upload of RPM repo metadata to GCS (%s)\n", folderURI)
|
||||
} else {
|
||||
log.Printf("Uploading RPM repo metadata to GCS (%s)...\n", folderURI)
|
||||
//nolint:gosec
|
||||
cmd = exec.Command("gsutil", "-m", "rsync", "-r", "-d", repoRoot, folderURI)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("failed to upload RPM repo metadata to GCS: %w\n%s", err, output)
|
||||
}
|
||||
allRepoResources := fmt.Sprintf("%s/**/*", folderURI)
|
||||
log.Printf("Setting cache ttl for RPM repo resources on GCS (%s)...\n", allRepoResources)
|
||||
//nolint:gosec
|
||||
cmd = exec.Command("gsutil", "-m", "setmeta", "-h", CacheSettings+cfg.TTL, allRepoResources)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("failed to set cache ttl for RPM repo resources on GCS: %w\n%s", err, output)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// downloadRPMs downloads RPM packages.
|
||||
func downloadRPMs(cfg PublishConfig, workDir string) error {
|
||||
if !strings.HasSuffix(workDir, string(filepath.Separator)) {
|
||||
workDir += string(filepath.Separator)
|
||||
}
|
||||
var version string
|
||||
if cfg.ReleaseMode.Mode == config.TagMode {
|
||||
if cfg.ReleaseMode.IsBeta {
|
||||
version = strings.ReplaceAll(cfg.Version, "-", "~")
|
||||
} else {
|
||||
version = cfg.Version
|
||||
}
|
||||
}
|
||||
if version == "" {
|
||||
panic(fmt.Sprintf("Unrecognized version mode %s", cfg.ReleaseMode.Mode))
|
||||
}
|
||||
|
||||
var sfx string
|
||||
switch cfg.Edition {
|
||||
case config.EditionOSS:
|
||||
case config.EditionEnterprise:
|
||||
sfx = EnterpriseSfx
|
||||
default:
|
||||
return fmt.Errorf("unrecognized edition %q", cfg.Edition)
|
||||
}
|
||||
|
||||
u := fmt.Sprintf("gs://%s/%s/%s/grafana%s-%s-*.*.rpm*", cfg.Bucket,
|
||||
strings.ToLower(string(cfg.Edition)), ReleaseFolder, sfx, version)
|
||||
log.Printf("Downloading RPM packages %q...\n", u)
|
||||
args := []string{
|
||||
"-m",
|
||||
"cp",
|
||||
u,
|
||||
workDir,
|
||||
}
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("gsutil", args...)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("failed to download RPM packages %q: %w\n%s", u, err, output)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func getPublicKey(cfg PublishConfig) (*packet.PublicKey, error) {
|
||||
f, err := os.Open(cfg.GPGPublicKey)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open %q: %w", cfg.GPGPublicKey, err)
|
||||
}
|
||||
defer func(f *os.File) {
|
||||
err := f.Close()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}(f)
|
||||
|
||||
block, err := armor.Decode(f)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if block.Type != openpgp.PublicKeyType {
|
||||
return nil, fmt.Errorf("invalid public key block type: %q", block.Type)
|
||||
}
|
||||
|
||||
packetReader := packet.NewReader(block.Body)
|
||||
pkt, err := packetReader.Next()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
key, ok := pkt.(*packet.PublicKey)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("got non-public key from packet reader: %T", pkt)
|
||||
}
|
||||
|
||||
return key, nil
|
||||
}
|
||||
|
||||
func getPrivateKey(cfg PublishConfig) (*packet.PrivateKey, error) {
|
||||
f, err := os.Open(cfg.GPGPrivateKey)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open %q: %w", cfg.GPGPrivateKey, err)
|
||||
}
|
||||
defer func(f *os.File) {
|
||||
err := f.Close()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}(f)
|
||||
|
||||
passphraseB, err := os.ReadFile(cfg.GPGPassPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read %q: %w", cfg.GPGPrivateKey, err)
|
||||
}
|
||||
passphraseB = bytes.TrimSuffix(passphraseB, []byte("\n"))
|
||||
|
||||
block, err := armor.Decode(f)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if block.Type != openpgp.PrivateKeyType {
|
||||
return nil, fmt.Errorf("invalid private key block type: %q", block.Type)
|
||||
}
|
||||
|
||||
packetReader := packet.NewReader(block.Body)
|
||||
pkt, err := packetReader.Next()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
key, ok := pkt.(*packet.PrivateKey)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("got non-private key from packet reader: %T", pkt)
|
||||
}
|
||||
|
||||
if err := key.Decrypt(passphraseB); err != nil {
|
||||
return nil, fmt.Errorf("failed to decrypt private key: %w", err)
|
||||
}
|
||||
return key, nil
|
||||
}
|
||||
|
||||
// signRPMRepo signs an RPM repository using PGP.
|
||||
// The signature gets written to the file repodata/repomd.xml.asc.
|
||||
func signRPMRepo(repoRoot string, cfg PublishConfig) error {
|
||||
if cfg.GPGPublicKey == "" || cfg.GPGPrivateKey == "" {
|
||||
return fmt.Errorf("private or public key is empty")
|
||||
}
|
||||
|
||||
log.Printf("Signing RPM repo")
|
||||
|
||||
pubKey, err := getPublicKey(cfg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
privKey, err := getPrivateKey(cfg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
pcfg := packet.Config{
|
||||
DefaultHash: crypto.SHA256,
|
||||
DefaultCipher: packet.CipherAES256,
|
||||
DefaultCompressionAlgo: packet.CompressionZLIB,
|
||||
CompressionConfig: &packet.CompressionConfig{
|
||||
Level: 9,
|
||||
},
|
||||
RSABits: 4096,
|
||||
}
|
||||
currentTime := pcfg.Now()
|
||||
uid := packet.NewUserId("", "", "")
|
||||
|
||||
isPrimaryID := false
|
||||
keyLifetimeSecs := uint32(86400 * 365)
|
||||
signer := openpgp.Entity{
|
||||
PrimaryKey: pubKey,
|
||||
PrivateKey: privKey,
|
||||
Identities: map[string]*openpgp.Identity{
|
||||
uid.Id: {
|
||||
Name: uid.Name,
|
||||
UserId: uid,
|
||||
SelfSignature: &packet.Signature{
|
||||
CreationTime: currentTime,
|
||||
SigType: packet.SigTypePositiveCert,
|
||||
PubKeyAlgo: packet.PubKeyAlgoRSA,
|
||||
Hash: pcfg.Hash(),
|
||||
IsPrimaryId: &isPrimaryID,
|
||||
FlagsValid: true,
|
||||
FlagSign: true,
|
||||
FlagCertify: true,
|
||||
IssuerKeyId: &pubKey.KeyId,
|
||||
},
|
||||
},
|
||||
},
|
||||
Subkeys: []openpgp.Subkey{
|
||||
{
|
||||
PublicKey: pubKey,
|
||||
PrivateKey: privKey,
|
||||
Sig: &packet.Signature{
|
||||
CreationTime: currentTime,
|
||||
SigType: packet.SigTypeSubkeyBinding,
|
||||
PubKeyAlgo: packet.PubKeyAlgoRSA,
|
||||
Hash: pcfg.Hash(),
|
||||
PreferredHash: []uint8{8}, // SHA-256
|
||||
FlagsValid: true,
|
||||
FlagEncryptStorage: true,
|
||||
FlagEncryptCommunications: true,
|
||||
IssuerKeyId: &pubKey.KeyId,
|
||||
KeyLifetimeSecs: &keyLifetimeSecs,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Ignore gosec G304 as this function is only used in the build process.
|
||||
//nolint:gosec
|
||||
freader, err := os.Open(filepath.Join(repoRoot, "repodata", "repomd.xml"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func(freader *os.File) {
|
||||
err := freader.Close()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}(freader)
|
||||
|
||||
// Ignore gosec G304 as this function is only used in the build process.
|
||||
//nolint:gosec
|
||||
sigwriter, err := os.Create(filepath.Join(repoRoot, "repodata", "repomd.xml.asc"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func(sigwriter *os.File) {
|
||||
err := sigwriter.Close()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}(sigwriter)
|
||||
|
||||
if err := openpgp.ArmoredDetachSignText(sigwriter, &signer, freader, nil); err != nil {
|
||||
return fmt.Errorf("failed to write PGP signature: %w", err)
|
||||
}
|
||||
|
||||
if err := sigwriter.Close(); err != nil {
|
||||
return fmt.Errorf("failed to write PGP signature: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
146
pkg/build/packaging/rpm_test.go
Normal file
146
pkg/build/packaging/rpm_test.go
Normal file
@@ -0,0 +1,146 @@
|
||||
package packaging
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
const pubKey = `-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
Version: OpenPGP.js v4.10.10
|
||||
Comment: https://openpgpjs.org
|
||||
|
||||
xsBNBGM1b9wBCADZM49X7vwOS93KbgA6yhpwrYf8ZlzksGcDaYgp1IzvqHbs
|
||||
xeU1mmBYVH/bSKRDG0tt3Qdky4Nvl4Oqd+g0e2ZGjmlEy9zUiPTTK/BtXT+5
|
||||
s8oqih2NIAkyF91BNZABAgvh/vJdYImhYeUQBDqMJgqZ/Y/Ha31N7rSW+jUt
|
||||
LHspbN0ztJYjuEd/bg2NKH7Gs/AyNvX9IQTC4k7iRRafx7q/PBCVtsk+NCwz
|
||||
BEkL93xpAdcdYiMNrRP2eIHQjBmNZ/oUCkcDsLCBvcSq6P2lGpNnpPzVoTJf
|
||||
v2qrWkVn5txJJsOkmBGpEDbECPunVilrWO6RPomP0yYkr6NE4XeCJ3QhABEB
|
||||
AAHNGWR1bW15IDxkdW1teUBob3RtYWlsLmNvbT7CwI0EEAEIACAFAmM1b9wG
|
||||
CwkHCAMCBBUICgIEFgIBAAIZAQIbAwIeAQAhCRAoJ1i5w6kkAxYhBCQv+iwt
|
||||
IFn7vj9PLygnWLnDqSQDPxkH/0Ju2Cah+bOxl09uv2Ft2BVlQh0u+wJyRVgs
|
||||
KxTxldAXFZwMrN4wK/GUoGWDiy2tzNtoVE6GpxWUj+LvSGFaVLNVjW+Le77I
|
||||
BP/sl1wKHJbQhseKc7Mz5Zj3i0F1FPM+rLik7tNk6kiEBqYVyyXahyT98Hu1
|
||||
1OKEV+8NiRG47iNgd/dpgEdVSS4DN/dL6m5q+CVy9YnlR+wXxF/2xcMmWBzR
|
||||
V2cPVw0JzunpUV8lDDQ/n1sPw61D3oL1aH0bkn8aA8pEceKOVIYOaja7LkLX
|
||||
uSlROlALA/M2fuubradW9I3FcrJNn+/xA52el2l/Hn/Syf9GQV/Ll/R+qKIo
|
||||
Z57xWd7OwE0EYzVv3AEIAJl/PNYOF2prNKY58BfZ74XurDb9mNlZ1wsIqrOu
|
||||
J/euzHEnzkCAjMUuXV7wcugjQlmpcZn6Y0QmQ2uX7SwPCMovDvngbXeAfbdd
|
||||
6FUKecQ0sG54Plm8HSMNdjetdUVl7ACxjJO8Rdc/Asx7ua7gMm42CVfqMj4L
|
||||
qN5foUBlaKJ1iGKUpQ+673UQWMYeOBuu9G8awbSzGaphN97CIX7xEMGzGeff
|
||||
yHLHK+MsfX935uDgDwJQzxJKEugIJDMKgWOLgVz1jRCsJKHlywHTWpVuMiKY
|
||||
Wnuq4tDNLBUQtaRL7uclG7Wejw/XNN0uD/zNHPgF5rmlYHVhrtDbBCP2XqTn
|
||||
WU8AEQEAAcLAdgQYAQgACQUCYzVv3AIbDAAhCRAoJ1i5w6kkAxYhBCQv+iwt
|
||||
IFn7vj9PLygnWLnDqSQDFqYH/AkdNaPUQlE7RQBigNRGOFBuqjhbLsV/rZf+
|
||||
/4K6wDHojM606lgLm57T4NUXnk53VIF3KO8+v8N11mCtPb+zBngfvVU14COC
|
||||
HEDNdOK19TlR+tH25cftfUiF+OJsgMQysErGuFEtwLE6TNzpQIcnw7SbjxMr
|
||||
EGacF9xCBKexB6MlR3GwJ2LBUJm3Lq/fvqImztoTlKDsrpk4JOH5FfYG+G2f
|
||||
1zU73fVsCCElX4qA/49rRQf0RNfhjRjmHULP8hSvCXUEhfiBggEgxof/vKlC
|
||||
qauHC55luuIeabju8HaXTjpz019cq+3IUgewX/ky0PhQXEW9SoODKabPY2yS
|
||||
yUbHFm4=
|
||||
=OCSx
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
`
|
||||
|
||||
const privKey = `-----BEGIN PGP PRIVATE KEY BLOCK-----
|
||||
Version: OpenPGP.js v4.10.10
|
||||
Comment: https://openpgpjs.org
|
||||
|
||||
xcMGBGM1b9wBCADZM49X7vwOS93KbgA6yhpwrYf8ZlzksGcDaYgp1IzvqHbs
|
||||
xeU1mmBYVH/bSKRDG0tt3Qdky4Nvl4Oqd+g0e2ZGjmlEy9zUiPTTK/BtXT+5
|
||||
s8oqih2NIAkyF91BNZABAgvh/vJdYImhYeUQBDqMJgqZ/Y/Ha31N7rSW+jUt
|
||||
LHspbN0ztJYjuEd/bg2NKH7Gs/AyNvX9IQTC4k7iRRafx7q/PBCVtsk+NCwz
|
||||
BEkL93xpAdcdYiMNrRP2eIHQjBmNZ/oUCkcDsLCBvcSq6P2lGpNnpPzVoTJf
|
||||
v2qrWkVn5txJJsOkmBGpEDbECPunVilrWO6RPomP0yYkr6NE4XeCJ3QhABEB
|
||||
AAH+CQMIuDEg1p2Y6zbg0EQ3JvsP7VQBGsuXg9khTjktoxhwici/d+rcIW7Q
|
||||
SuKWJGqs83LTeeGmS+9etNtf3LqRdPnI7f0qbT47mAqvp2gn7Rvbrabk+5Jj
|
||||
AQS/DDLlWNiWsPrMBMZ7TZpiQ+g7gnIZaV10taFupYJr69AjtED+NPu8LOvZ
|
||||
2ItK9xBqOwl5mkNe7ps/uTT6jwYSWxeObp4ymnLDLONY3eHuaYP9QB/NSlw0
|
||||
80Wo5qBPljlU8JdbEoLFU4gY6wkEbLa/DVbEVXSHfWVtr8jZbzHW39TBxpG2
|
||||
Dxk52EVyu8Gf9XIQN2ZjDP3CzBGmlxJjLxLUD4GmRSPaDGK7LCN9ZztaXy3Y
|
||||
WtF6RJfNzEoDdCaV0kkM3AskQDsQ+CWsDVsbbQyDtfncVG6cDzqmoDrBCSq1
|
||||
Bsoz07k2hj9VP0aP2xU78qcpJWO2rmhAHy9W2NqjXSBJriy1JXrK5o2/lUUr
|
||||
94R8NLvqeVbInUw/zovVctaujHIBhNKL9wn2T0LWrA2OEJUz0HWo6ZQSaNzl
|
||||
Obtz0M8gCj/4sDYjRAiDk50FzOcZp8ijYQFVypQTVzHki5T/JfvBnMpo+4Uc
|
||||
93QB1woyiZuJCIj7DpY3MkZ5fTDtgJPa+0k8r+lPnAmE6auGUaH7JRKhbBu0
|
||||
8faDwaiSv3kD3EEDffoWX/axLLYta9jTDnitTXbf1jY03pdJeiU/ZX0BQTZi
|
||||
pehZ/6yi/qXM/F8HDVEWriSLqVsMLrXXeFIojAc3fJ/QPpAZSx6E/Fe2xh8c
|
||||
yURov5krU1zNJDwqC3SjHsHQ/UlLtamDDmmuXX+xb1CwIDd6WksGsCbe/LoN
|
||||
TxViV4hOjIeh5TwRP5jQaqsVKCT8fzoDrRXy76taT+Zaaen+J6rC51HQwyEq
|
||||
Qgf1e7WodzN3r10UV6/L/wNkfdWJgf5MzRlkdW1teSA8ZHVtbXlAaG90bWFp
|
||||
bC5jb20+wsCNBBABCAAgBQJjNW/cBgsJBwgDAgQVCAoCBBYCAQACGQECGwMC
|
||||
HgEAIQkQKCdYucOpJAMWIQQkL/osLSBZ+74/Ty8oJ1i5w6kkAz8ZB/9Cbtgm
|
||||
ofmzsZdPbr9hbdgVZUIdLvsCckVYLCsU8ZXQFxWcDKzeMCvxlKBlg4strczb
|
||||
aFROhqcVlI/i70hhWlSzVY1vi3u+yAT/7JdcChyW0IbHinOzM+WY94tBdRTz
|
||||
Pqy4pO7TZOpIhAamFcsl2ock/fB7tdTihFfvDYkRuO4jYHf3aYBHVUkuAzf3
|
||||
S+puavglcvWJ5UfsF8Rf9sXDJlgc0VdnD1cNCc7p6VFfJQw0P59bD8OtQ96C
|
||||
9Wh9G5J/GgPKRHHijlSGDmo2uy5C17kpUTpQCwPzNn7rm62nVvSNxXKyTZ/v
|
||||
8QOdnpdpfx5/0sn/RkFfy5f0fqiiKGee8Vnex8MGBGM1b9wBCACZfzzWDhdq
|
||||
azSmOfAX2e+F7qw2/ZjZWdcLCKqzrif3rsxxJ85AgIzFLl1e8HLoI0JZqXGZ
|
||||
+mNEJkNrl+0sDwjKLw754G13gH23XehVCnnENLBueD5ZvB0jDXY3rXVFZewA
|
||||
sYyTvEXXPwLMe7mu4DJuNglX6jI+C6jeX6FAZWiidYhilKUPuu91EFjGHjgb
|
||||
rvRvGsG0sxmqYTfewiF+8RDBsxnn38hyxyvjLH1/d+bg4A8CUM8SShLoCCQz
|
||||
CoFji4Fc9Y0QrCSh5csB01qVbjIimFp7quLQzSwVELWkS+7nJRu1no8P1zTd
|
||||
Lg/8zRz4Bea5pWB1Ya7Q2wQj9l6k51lPABEBAAH+CQMIwr3YSD15lYrgItoy
|
||||
MDsrWqMMHJsSxusbQiK0KLgjFBuDuTolsu9zqQCHEm2dxChqT+yQ6AeeynRD
|
||||
pDMVkHEvhShvGUhB6Bu5wClHj8+xFpyprShE/KbEuppNdfIRgWVYc7UX+TYz
|
||||
6BymqhzKyIw2Q33ocrXgTRZ02HM7urKVvAhsJCEff0paByOzCspiv/TPRihi
|
||||
7GAZY0wFLDPe9qr+07ExT2ndMDX8Xb1mlg8IeaSWUaNilm7M8oW3xnUBnXeD
|
||||
XglTkObGeRVXAINim9uL4soT3lamN4QwgBus9WzFqOOCMk11fjatY8kY1zX9
|
||||
epO27igGtMwTFl11XcQLlFyvlgPBeWtFam7RiDPa3VF0XubmBYZBmqWpccWs
|
||||
xl0xHCtUK7Pd0O4kSqxsL9cB0MX9iR1yPkM8wA++Mp6pEfNcXUrGIdlie0H5
|
||||
aCq8rguYG5VuFosSUatdCbpRVGBxGnhxHes0mNTPgwAoAVNYBWXH5iq5HxKy
|
||||
i3Zy5V7ZKSyDrfg/0AajtDW5h3g+wglUI9UCdT4tNLFwYbhHqGH2xdBztYI0
|
||||
iSJ7COLmo26smkA8UXxsrlw8PWPzpbhQOG06EbMjncJimJDMI1YDC6ag7M5l
|
||||
OcG9uXZQ22ipAz5CSPtyL0/0WAp4yyn1VQRBK42n/y9ld+dMbuq6majazb15
|
||||
6sEgHUKERcwGs0Ftfj5Zamwhm7ZoIe26XEqvcshpQpv1Q9hktluVeSbiVaBe
|
||||
Nl8zUZHlo/0zUc5j7G5Up58t+ChSsyOFJGM7CGkKHHawBZYCs0EcpsdAPr3T
|
||||
1C8A0Wt9POTETYM4pZFOoLds6VTolZZcxeBN5YPoN2kbwFpOgPJN09Zz8z8S
|
||||
4psQRV4KQ92XDPZ/6q2BH5i2+F2ZwUsvCR4DwgzbVGZSRV6mM7lkjZSmnWfC
|
||||
AE7DUl7XwsB2BBgBCAAJBQJjNW/cAhsMACEJECgnWLnDqSQDFiEEJC/6LC0g
|
||||
Wfu+P08vKCdYucOpJAMWpgf8CR01o9RCUTtFAGKA1EY4UG6qOFsuxX+tl/7/
|
||||
grrAMeiMzrTqWAubntPg1ReeTndUgXco7z6/w3XWYK09v7MGeB+9VTXgI4Ic
|
||||
QM104rX1OVH60fblx+19SIX44myAxDKwSsa4US3AsTpM3OlAhyfDtJuPEysQ
|
||||
ZpwX3EIEp7EHoyVHcbAnYsFQmbcur9++oibO2hOUoOyumTgk4fkV9gb4bZ/X
|
||||
NTvd9WwIISVfioD/j2tFB/RE1+GNGOYdQs/yFK8JdQSF+IGCASDGh/+8qUKp
|
||||
q4cLnmW64h5puO7wdpdOOnPTX1yr7chSB7Bf+TLQ+FBcRb1Kg4Mpps9jbJLJ
|
||||
RscWbg==
|
||||
=KJNy
|
||||
-----END PGP PRIVATE KEY BLOCK-----
|
||||
`
|
||||
|
||||
// Dummy GPG keys, used only for testing
|
||||
// nolint:gosec
|
||||
const passPhrase = `MkDgjkrgdGxt`
|
||||
|
||||
// TestSignRPMRepo exercises signRPMRepo end to end against a temporary RPM
// repository layout, using the dummy GPG key material defined above.
func TestSignRPMRepo(t *testing.T) {
	repoDir := t.TempDir()
	workDir := t.TempDir()

	// Write the dummy public key, private key and passphrase to disk, since
	// signRPMRepo consumes them as file paths via PublishConfig.
	pubKeyPath := filepath.Join(workDir, "pub.key")
	err := os.WriteFile(pubKeyPath, []byte(pubKey), 0600)
	require.NoError(t, err)
	privKeyPath := filepath.Join(workDir, "priv.key")
	err = os.WriteFile(privKeyPath, []byte(privKey), 0600)
	require.NoError(t, err)
	passPhrasePath := filepath.Join(workDir, "passphrase")
	err = os.WriteFile(passPhrasePath, []byte(passPhrase), 0600)
	require.NoError(t, err)

	// Minimal repository layout: a repodata/repomd.xml stub.
	// NOTE(review): presumably signRPMRepo signs repodata/repomd.xml — confirm
	// against its implementation.
	err = os.Mkdir(filepath.Join(repoDir, "repodata"), 0700)
	require.NoError(t, err)
	err = os.WriteFile(filepath.Join(repoDir, "repodata", "repomd.xml"), []byte("<xml></xml>"), 0600)
	require.NoError(t, err)

	cfg := PublishConfig{
		Config: config.Config{
			GPGPrivateKey: privKeyPath,
			GPGPublicKey:  pubKeyPath,
			GPGPassPath:   passPhrasePath,
		},
	}

	// The signing itself must succeed with the dummy key material.
	err = signRPMRepo(repoDir, cfg)
	require.NoError(t, err)
}
|
||||
66
pkg/build/plugins/build.go
Normal file
66
pkg/build/plugins/build.go
Normal file
@@ -0,0 +1,66 @@
|
||||
package plugins
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/errutil"
|
||||
"github.com/grafana/grafana/pkg/build/syncutil"
|
||||
"github.com/grafana/grafana/pkg/infra/fs"
|
||||
)
|
||||
|
||||
// PluginSigningMode is an alias for plugin signing mode values.
// NOTE(review): no named constants for this alias are visible in this file.
type PluginSigningMode = int

// Build builds internal plugins concurrently using the worker pool p,
// scheduling one job per plugin directory under
// <grafanaDir>/plugins-bundled/internal.
// The built plugins are placed in plugins-bundled/dist/.
// If plugin signing is enabled in verMode, a signed manifest is requested for
// each plugin via BuildManifest.
func Build(ctx context.Context, grafanaDir string, p syncutil.WorkerPool, g *errutil.Group, verMode *config.BuildConfig) error {
	log.Printf("Building plugins in %q...", grafanaDir)

	root := filepath.Join(grafanaDir, "plugins-bundled", "internal")
	fis, err := os.ReadDir(root)
	if err != nil {
		return err
	}

	for i := range fis {
		// Copy the entry into a per-iteration variable so the closure below
		// does not capture the loop variable.
		fi := fis[i]
		if !fi.IsDir() {
			continue
		}

		dpath := filepath.Join(root, fi.Name())

		p.Schedule(g.Wrap(func() error {
			log.Printf("Building plugin %q...", dpath)

			cmd := exec.Command("yarn", "build")
			cmd.Dir = dpath
			if output, err := cmd.CombinedOutput(); err != nil {
				return fmt.Errorf("yarn build failed: %s", output)
			}

			// NOTE(review): dstPath is relative to the process working
			// directory, not grafanaDir — confirm callers run from the
			// repository root.
			dstPath := filepath.Join("plugins-bundled", "dist", fi.Name())
			if err := fs.CopyRecursive(filepath.Join(dpath, "dist"), dstPath); err != nil {
				return err
			}
			if !verMode.PluginSignature.Sign {
				return nil
			}

			return BuildManifest(ctx, dstPath, verMode.PluginSignature.AdminSign)
		}))
	}

	// Block until every scheduled build job has finished (or the first error).
	if err := g.Wait(); err != nil {
		return err
	}

	log.Printf("Built all plug-ins successfully!")

	return nil
}
|
||||
118
pkg/build/plugins/download.go
Normal file
118
pkg/build/plugins/download.go
Normal file
@@ -0,0 +1,118 @@
|
||||
package plugins
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/errutil"
|
||||
"github.com/grafana/grafana/pkg/build/syncutil"
|
||||
)
|
||||
|
||||
// logCloseError executes the closeFunc; if it returns an error, it is logged by the log package.
|
||||
func logCloseError(closeFunc func() error) {
|
||||
if err := closeFunc(); err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
}
|
||||
|
||||
// logError logs err via the standard log package if it is non-nil.
// (The previous comment here was copy-pasted from logCloseError.)
func logError(err error) {
	if err != nil {
		log.Println(err)
	}
}
|
||||
|
||||
// pluginManifest has details of an external plugin package.
type pluginManifest struct {
	// Name is the plugin identifier used to build the download URL.
	Name string `json:"name"`
	// Version is the plugin version used to build the download URL.
	Version string `json:"version"`
	// Checksum is the hex-encoded SHA-256 of the plugin zip archive
	// (compared against the downloaded file in Download).
	Checksum string `json:"checksum"`
}

// pluginsManifest represents a manifest of Grafana's external plugins,
// as decoded from plugins-bundled/external.json.
type pluginsManifest struct {
	Plugins []pluginManifest `json:"plugins"`
}
|
||||
|
||||
// downloadPlugins downloads Grafana plugins that should be bundled into packages.
|
||||
//
|
||||
// The plugin archives are downloaded into <grafanaDir>/plugins-bundled.
|
||||
func Download(ctx context.Context, grafanaDir string, p syncutil.WorkerPool) error {
|
||||
g, _ := errutil.GroupWithContext(ctx)
|
||||
|
||||
log.Println("Downloading external plugins...")
|
||||
|
||||
var m pluginsManifest
|
||||
manifestPath := filepath.Join(grafanaDir, "plugins-bundled", "external.json")
|
||||
//nolint:gosec
|
||||
manifestB, err := os.ReadFile(manifestPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open plugins manifest %q: %w", manifestPath, err)
|
||||
}
|
||||
if err := json.Unmarshal(manifestB, &m); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for i := range m.Plugins {
|
||||
pm := m.Plugins[i]
|
||||
p.Schedule(g.Wrap(func() error {
|
||||
tgt := filepath.Join(grafanaDir, "plugins-bundled", fmt.Sprintf("%s-%s.zip", pm.Name, pm.Version))
|
||||
//nolint:gosec
|
||||
out, err := os.Create(tgt)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer logCloseError(out.Close)
|
||||
|
||||
u := fmt.Sprintf("http://storage.googleapis.com/plugins-ci/plugins/%s/%s-%s.zip", pm.Name, pm.Name,
|
||||
pm.Version)
|
||||
log.Printf("Downloading plugin %q to %q...", u, tgt)
|
||||
// nolint:gosec
|
||||
resp, err := http.Get(u)
|
||||
if err != nil {
|
||||
return fmt.Errorf("downloading %q failed: %w", u, err)
|
||||
}
|
||||
defer logError(resp.Body.Close())
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("failed to download %q, status code %d", u, resp.StatusCode)
|
||||
}
|
||||
|
||||
if _, err := io.Copy(out, resp.Body); err != nil {
|
||||
return fmt.Errorf("downloading %q failed: %w", u, err)
|
||||
}
|
||||
if err := out.Close(); err != nil {
|
||||
return fmt.Errorf("downloading %q failed: %w", u, err)
|
||||
}
|
||||
|
||||
//nolint:gosec
|
||||
fd, err := os.Open(tgt)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer logCloseError(fd.Close)
|
||||
|
||||
h := sha256.New()
|
||||
if _, err := io.Copy(h, fd); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
chksum := hex.EncodeToString(h.Sum(nil))
|
||||
if chksum != pm.Checksum {
|
||||
return fmt.Errorf("plugin %q has bad checksum: %s (expected %s)", u, chksum, pm.Checksum)
|
||||
}
|
||||
|
||||
return Unzip(tgt, filepath.Join(grafanaDir, "plugins-bundled"))
|
||||
}))
|
||||
}
|
||||
|
||||
return g.Wait()
|
||||
}
|
||||
204
pkg/build/plugins/manifest.go
Normal file
204
pkg/build/plugins/manifest.go
Normal file
@@ -0,0 +1,204 @@
|
||||
package plugins
|
||||
|
||||
import (
	"bytes"
	"context"
	"crypto/sha256"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"path/filepath"
	"strings"
)
|
||||
|
||||
type manifest struct {
|
||||
Plugin string `json:"plugin"`
|
||||
Version string `json:"version"`
|
||||
Files map[string]string `json:"files"`
|
||||
}
|
||||
|
||||
func getManifest(dpath string, chksums map[string]string) (manifest, error) {
|
||||
m := manifest{}
|
||||
|
||||
type pluginInfo struct {
|
||||
Version string `json:"version"`
|
||||
}
|
||||
|
||||
type plugin struct {
|
||||
ID string `json:"id"`
|
||||
Info pluginInfo `json:"info"`
|
||||
}
|
||||
|
||||
//nolint:gosec
|
||||
f, err := os.Open(filepath.Join(dpath, "plugin.json"))
|
||||
if err != nil {
|
||||
return m, err
|
||||
}
|
||||
decoder := json.NewDecoder(f)
|
||||
var p plugin
|
||||
if err := decoder.Decode(&p); err != nil {
|
||||
return m, err
|
||||
}
|
||||
|
||||
if p.ID == "" {
|
||||
return m, fmt.Errorf("plugin.json doesn't define id")
|
||||
}
|
||||
if p.Info.Version == "" {
|
||||
return m, fmt.Errorf("plugin.json doesn't define info.version")
|
||||
}
|
||||
|
||||
return manifest{
|
||||
Plugin: p.ID,
|
||||
Version: p.Info.Version,
|
||||
Files: chksums,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// BuildManifest requests a plugin's signed manifest file fromt he Grafana API.
|
||||
// If signingAdmin is true, the manifest signing admin endpoint (without plugin ID) will be used, and requires
|
||||
// an admin API key.
|
||||
func BuildManifest(ctx context.Context, dpath string, signingAdmin bool) error {
|
||||
log.Printf("Building manifest for plug-in at %q", dpath)
|
||||
|
||||
apiKey := os.Getenv("GRAFANA_API_KEY")
|
||||
if apiKey == "" {
|
||||
return fmt.Errorf("GRAFANA_API_KEY must be set")
|
||||
}
|
||||
|
||||
manifestPath := filepath.Join(dpath, "MANIFEST.txt")
|
||||
chksums, err := getChksums(dpath, manifestPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
m, err := getManifest(dpath, chksums)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
b := bytes.NewBuffer(nil)
|
||||
encoder := json.NewEncoder(b)
|
||||
if err := encoder.Encode(&m); err != nil {
|
||||
return err
|
||||
}
|
||||
jsonB := b.Bytes()
|
||||
u := "https://grafana.com/api/plugins/ci/sign"
|
||||
if !signingAdmin {
|
||||
u = fmt.Sprintf("https://grafana.com/api/plugins/%s/ci/sign", m.Plugin)
|
||||
}
|
||||
log.Printf("Requesting signed manifest from Grafana API...")
|
||||
req, err := http.NewRequestWithContext(ctx, "POST", u, bytes.NewReader(jsonB))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey))
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get signed manifest from Grafana API: %w", err)
|
||||
}
|
||||
defer func() {
|
||||
if err := resp.Body.Close(); err != nil {
|
||||
log.Println("failed to close response body, err: %w", err)
|
||||
}
|
||||
}()
|
||||
if resp.StatusCode != 200 {
|
||||
msg, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
log.Printf("Failed to read response body: %s", err)
|
||||
msg = []byte("")
|
||||
}
|
||||
return fmt.Errorf("request for signed manifest failed with status code %d: %s", resp.StatusCode, string(msg))
|
||||
}
|
||||
|
||||
log.Printf("Successfully signed manifest via Grafana API, writing to %q", manifestPath)
|
||||
//nolint:gosec
|
||||
f, err := os.Create(manifestPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create %s: %w", manifestPath, err)
|
||||
}
|
||||
defer func() {
|
||||
if err := f.Close(); err != nil {
|
||||
log.Println("failed to close file, err: %w", err)
|
||||
}
|
||||
}()
|
||||
if _, err := io.Copy(f, resp.Body); err != nil {
|
||||
return fmt.Errorf("failed to write %s: %w", manifestPath, err)
|
||||
}
|
||||
if err := f.Close(); err != nil {
|
||||
return fmt.Errorf("failed to write %s: %w", manifestPath, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func getChksums(dpath, manifestPath string) (map[string]string, error) {
|
||||
manifestPath = filepath.Clean(manifestPath)
|
||||
|
||||
chksums := map[string]string{}
|
||||
if err := filepath.Walk(dpath, func(path string, fi os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if fi.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
path = filepath.Clean(path)
|
||||
|
||||
// Handle symbolic links
|
||||
if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
|
||||
finalPath, err := filepath.EvalSymlinks(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Printf("Handling symlink %q, pointing to %q", path, finalPath)
|
||||
|
||||
info, err := os.Stat(finalPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if info.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
if _, err := filepath.Rel(dpath, finalPath); err != nil {
|
||||
return fmt.Errorf("symbolic link %q targets a file outside of the plugin directory: %q", path, finalPath)
|
||||
}
|
||||
|
||||
if finalPath == manifestPath {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if path == manifestPath {
|
||||
return nil
|
||||
}
|
||||
|
||||
h := sha256.New()
|
||||
//nolint:gosec
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer logCloseError(f.Close)
|
||||
if _, err := io.Copy(h, f); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
relPath, err := filepath.Rel(dpath, path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
chksums[relPath] = fmt.Sprintf("%x", h.Sum(nil))
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return chksums, nil
|
||||
}
|
||||
64
pkg/build/plugins/zip.go
Normal file
64
pkg/build/plugins/zip.go
Normal file
@@ -0,0 +1,64 @@
|
||||
package plugins
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// Unzip unzips the plugin archive at fpath into tgtDir, recreating each zip
// member's mode, and returns the first extraction error encountered.
//
// NOTE(review): destination paths are built with filepath.Join(tgtDir, f.Name)
// without rejecting "../" members (zip-slip); the //nolint:gosec markers
// suggest this is accepted for trusted, checksum-verified archives — confirm
// before using with untrusted input.
func Unzip(fpath, tgtDir string) error {
	log.Printf("Unzipping plugin %q into %q...", fpath, tgtDir)

	r, err := zip.OpenReader(fpath)
	if err != nil {
		return err
	}
	defer logCloseError(r.Close)

	// Closure to address file descriptors issue with all the deferred .Close() methods
	extractAndWriteFile := func(f *zip.File) error {
		log.Printf("Extracting zip member %q...", f.Name)

		rc, err := f.Open()
		if err != nil {
			return err
		}
		defer logCloseError(rc.Close)

		//nolint:gosec
		dstPath := filepath.Join(tgtDir, f.Name)

		// Directory members only need to exist with the right mode.
		if f.FileInfo().IsDir() {
			return os.MkdirAll(dstPath, f.Mode())
		}

		// Ensure the parent directory exists for file members.
		if err := os.MkdirAll(filepath.Dir(dstPath), f.Mode()); err != nil {
			return err
		}

		//nolint:gosec
		fd, err := os.OpenFile(dstPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())
		if err != nil {
			return err
		}
		defer logCloseError(fd.Close)

		// nolint:gosec
		if _, err := io.Copy(fd, rc); err != nil {
			return err
		}

		// Close explicitly so write errors surface; the deferred close then
		// at most logs a double-close error.
		return fd.Close()
	}

	for _, f := range r.File {
		if err := extractAndWriteFile(f); err != nil {
			return err
		}
	}

	return nil
}
|
||||
10
pkg/build/stringutil/contains.go
Normal file
10
pkg/build/stringutil/contains.go
Normal file
@@ -0,0 +1,10 @@
|
||||
package stringutil
|
||||
|
||||
// Contains reports whether s occurs in arr.
func Contains(arr []string, s string) bool {
	for i := range arr {
		if arr[i] == s {
			return true
		}
	}
	return false
}
|
||||
43
pkg/build/syncutil/pool.go
Normal file
43
pkg/build/syncutil/pool.go
Normal file
@@ -0,0 +1,43 @@
|
||||
package syncutil
|
||||
|
||||
import (
|
||||
"log"
|
||||
"runtime"
|
||||
)
|
||||
|
||||
// worker drains the jobs channel, running each job in turn, until the
// channel is closed.
func worker(jobs chan func()) {
	for job := range jobs {
		job()
	}
}

// WorkerPool represents a concurrent worker pool.
type WorkerPool struct {
	NumWorkers int         // number of goroutines consuming jobs
	jobs       chan func() // buffered queue of pending jobs
}

// NewWorkerPool constructs a new WorkerPool with numWorkers workers.
// A non-positive numWorkers defaults to runtime.NumCPU().
func NewWorkerPool(numWorkers int) WorkerPool {
	if numWorkers <= 0 {
		numWorkers = runtime.NumCPU()
	}
	log.Printf("Creating worker pool with %d workers", numWorkers)

	queue := make(chan func(), 100)
	for w := 0; w < numWorkers; w++ {
		go worker(queue)
	}

	return WorkerPool{
		NumWorkers: numWorkers,
		jobs:       queue,
	}
}

// Schedule schedules a job to be executed by a worker in the pool.
// It blocks once the queue (capacity 100) is full.
func (p WorkerPool) Schedule(job func()) {
	p.jobs <- job
}

// Close closes the job queue, letting workers exit after draining it.
func (p WorkerPool) Close() {
	close(p.jobs)
}
|
||||
@@ -38,6 +38,11 @@ func (ls *Implementation) CreateUser(cmd models.CreateUserCommand) (*models.User
|
||||
|
||||
// UpsertUser updates an existing user, or if it doesn't exist, inserts a new one.
|
||||
func (ls *Implementation) UpsertUser(ctx context.Context, cmd *models.UpsertUserCommand) error {
|
||||
var logger log.Logger = logger
|
||||
if cmd.ReqContext != nil && cmd.ReqContext.Logger != nil {
|
||||
logger = cmd.ReqContext.Logger
|
||||
}
|
||||
|
||||
extUser := cmd.ExternalUser
|
||||
|
||||
user, err := ls.AuthInfoService.LookupAndUpdate(ctx, &models.GetUserByAuthInfoQuery{
|
||||
@@ -50,13 +55,13 @@ func (ls *Implementation) UpsertUser(ctx context.Context, cmd *models.UpsertUser
|
||||
return err
|
||||
}
|
||||
if !cmd.SignupAllowed {
|
||||
cmd.ReqContext.Logger.Warn("Not allowing login, user not found in internal user database and allow signup = false", "authmode", extUser.AuthModule)
|
||||
logger.Warn("Not allowing login, user not found in internal user database and allow signup = false", "authmode", extUser.AuthModule)
|
||||
return login.ErrSignupNotAllowed
|
||||
}
|
||||
|
||||
limitReached, err := ls.QuotaService.QuotaReached(cmd.ReqContext, "user")
|
||||
if err != nil {
|
||||
cmd.ReqContext.Logger.Warn("Error getting user quota.", "error", err)
|
||||
logger.Warn("Error getting user quota.", "error", err)
|
||||
return login.ErrGettingUserQuota
|
||||
}
|
||||
if limitReached {
|
||||
|
||||
@@ -6,14 +6,17 @@ import (
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/services/login"
|
||||
|
||||
"github.com/go-kit/log"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log/level"
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/services/login/logintest"
|
||||
"github.com/grafana/grafana/pkg/services/quota"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/mockstore"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_syncOrgRoles_doesNotBreakWhenTryingToRemoveLastOrgAdmin(t *testing.T) {
|
||||
@@ -112,6 +115,28 @@ func Test_teamSync(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
func TestUpsertUser_crashOnLog_issue62538(t *testing.T) {
|
||||
authInfoMock := &logintest.AuthInfoServiceFake{}
|
||||
authInfoMock.ExpectedError = models.ErrUserNotFound
|
||||
loginsvc := Implementation{
|
||||
QuotaService: "a.QuotaService{},
|
||||
AuthInfoService: authInfoMock,
|
||||
}
|
||||
|
||||
email := "test_user@example.org"
|
||||
upsertCmd := &models.UpsertUserCommand{
|
||||
ExternalUser: &models.ExternalUserInfo{Email: email},
|
||||
UserLookupParams: models.UserLookupParams{Email: &email},
|
||||
SignupAllowed: false,
|
||||
}
|
||||
|
||||
var err error
|
||||
require.NotPanics(t, func() {
|
||||
err = loginsvc.UpsertUser(context.Background(), upsertCmd)
|
||||
})
|
||||
require.ErrorIs(t, err, login.ErrSignupNotAllowed)
|
||||
}
|
||||
|
||||
func createSimpleUser() models.User {
|
||||
user := models.User{
|
||||
Id: 1,
|
||||
|
||||
@@ -160,7 +160,7 @@ func (api *API) authorize(method, path string) web.Handler {
|
||||
eval = ac.EvalAny(ac.EvalPermission(ac.ActionAlertingNotificationsWrite))
|
||||
case http.MethodPost + "/api/alertmanager/grafana/config/api/v1/receivers/test":
|
||||
fallback = middleware.ReqEditorRole
|
||||
eval = ac.EvalPermission(ac.ActionAlertingNotificationsRead)
|
||||
eval = ac.EvalPermission(ac.ActionAlertingNotificationsWrite)
|
||||
|
||||
// External Alertmanager Paths
|
||||
case http.MethodDelete + "/api/alertmanager/{Recipient}/config/api/v1/alerts":
|
||||
|
||||
@@ -14,6 +14,7 @@ import (
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
ngModels "github.com/grafana/grafana/pkg/services/ngalert/models"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrations"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrations/ualert"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
|
||||
@@ -459,6 +460,49 @@ func TestDashAlertMigration(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// TestDashAlertMigrationFolders tests the dashboard-alert migration's folder
// assignment: alerts whose dashboard folder is missing are migrated into the
// General folder. (The previous comment named the wrong function.)
func TestDashAlertMigrationFolders(t *testing.T) {
	// Run initial migration to have a working DB.
	x := setupTestDB(t)

	t.Run("when folder is missing put alert in General folder", func(t *testing.T) {
		// dash1 lives in an existing folder; dash2 references a folder ID that
		// does not exist (an orphaned dashboard).
		o := createOrg(t, 1)
		folder1 := createDashboard(t, 1, o.Id, "folder-1")
		folder1.IsFolder = true
		dash1 := createDashboard(t, 3, o.Id, "dash1")
		dash1.FolderId = folder1.Id
		dash2 := createDashboard(t, 4, o.Id, "dash2")
		dash2.FolderId = 22 // missing folder

		a1 := createAlert(t, o.Id, dash1.Id, int64(1), "alert-1", []string{})
		a2 := createAlert(t, o.Id, dash2.Id, int64(1), "alert-2", []string{})

		_, err := x.Insert(o, folder1, dash1, dash2, a1, a2)
		require.NoError(t, err)

		runDashAlertMigrationTestRun(t, x)

		rules := getAlertRules(t, x, o.Id)
		require.Len(t, rules, 2)

		// The migration must have created the General folder for this org.
		var generalFolder models.Dashboard
		_, err = x.Table(&models.Dashboard{}).Where("title = ? AND org_id = ?", ualert.GENERAL_FOLDER, o.Id).Get(&generalFolder)
		require.NoError(t, err)

		require.NotNil(t, generalFolder)

		// alert-1 keeps its dashboard's folder; alert-2 (orphaned dashboard)
		// must land in the General folder.
		for _, rule := range rules {
			var expectedFolder models.Dashboard
			if rule.Title == a1.Name {
				expectedFolder = *folder1
			} else {
				expectedFolder = generalFolder
			}
			require.Equal(t, expectedFolder.Uid, rule.NamespaceUID)
		}
	})
}
|
||||
|
||||
// setupTestDB prepares the sqlite database and runs OSS migrations to initialize the schemas.
|
||||
func setupTestDB(t *testing.T) *xorm.Engine {
|
||||
t.Helper()
|
||||
@@ -593,6 +637,18 @@ func teardown(t *testing.T, x *xorm.Engine) {
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
// runDashAlertMigrationTestRun (re-)runs the dashboard-alert migration on x:
// it deletes the migration-log entry so the migration is considered pending
// again, registers the rollback migration plus the dash-alert migration, and
// starts the migrator, requiring success.
func runDashAlertMigrationTestRun(t *testing.T, x *xorm.Engine) {
	_, errDeleteMig := x.Exec("DELETE FROM migration_log WHERE migration_id = ?", ualert.MigTitle)
	require.NoError(t, errDeleteMig)

	alertMigrator := migrator.NewMigrator(x, &setting.Cfg{})
	alertMigrator.AddMigration(ualert.RmMigTitle, &ualert.RmMigration{})
	ualert.AddDashAlertMigration(alertMigrator)

	errRunningMig := alertMigrator.Start(false, 0)
	require.NoError(t, errRunningMig)
}
|
||||
|
||||
// setupLegacyAlertsTables inserts data into the legacy alerting tables that is needed for testing the migration.
|
||||
func setupLegacyAlertsTables(t *testing.T, x *xorm.Engine, legacyChannels []*models.AlertNotification, alerts []*models.Alert) {
|
||||
t.Helper()
|
||||
@@ -637,6 +693,14 @@ func setupLegacyAlertsTables(t *testing.T, x *xorm.Engine, legacyChannels []*mod
|
||||
}
|
||||
}
|
||||
|
||||
// getAlertRules returns all alert rules stored in the alert_rule table for
// the given organization, failing the test on query error.
func getAlertRules(t *testing.T, x *xorm.Engine, orgId int64) []*ngModels.AlertRule {
	// Start from an empty (non-nil) slice so callers always get a usable slice.
	rules := make([]*ngModels.AlertRule, 0)
	err := x.Table("alert_rule").Where("org_id = ?", orgId).Find(&rules)
	require.NoError(t, err)

	return rules
}
|
||||
|
||||
// getAlertmanagerConfig retrieves the Alertmanager Config from the database for a given orgId.
|
||||
func getAlertmanagerConfig(t *testing.T, x *xorm.Engine, orgId int64) *ualert.PostableUserConfig {
|
||||
amConfig := ""
|
||||
|
||||
@@ -248,11 +248,31 @@ func (m *migration) Exec(sess *xorm.Session, mg *migrator.Migrator) error {
|
||||
|
||||
// cache for folders created for dashboards that have custom permissions
|
||||
folderCache := make(map[string]*dashboard)
|
||||
|
||||
gf := func(dash dashboard, da dashAlert) (*dashboard, error) {
|
||||
f, ok := folderCache[GENERAL_FOLDER]
|
||||
if !ok {
|
||||
// get or create general folder
|
||||
f, err = m.getOrCreateGeneralFolder(dash.OrgId)
|
||||
if err != nil {
|
||||
return nil, MigrationError{
|
||||
Err: fmt.Errorf("failed to get or create general folder under organisation %d: %w", dash.OrgId, err),
|
||||
AlertId: da.Id,
|
||||
}
|
||||
}
|
||||
folderCache[GENERAL_FOLDER] = f
|
||||
}
|
||||
// No need to assign default permissions to general folder
|
||||
// because they are included to the query result if it's a folder with no permissions
|
||||
// https://github.com/grafana/grafana/blob/076e2ce06a6ecf15804423fcc8dca1b620a321e5/pkg/services/sqlstore/dashboard_acl.go#L109
|
||||
return f, nil
|
||||
}
|
||||
|
||||
// Store of newly created rules to later create routes
|
||||
rulesPerOrg := make(map[int64]map[string]dashAlert)
|
||||
|
||||
for _, da := range dashAlerts {
|
||||
l := mg.Logger.New("ruleID", da.Id, "ruleName", da.Name, "dashboardUID", da.DashboardUID, "orgID", da.OrgId)
|
||||
newCond, err := transConditions(*da.ParsedSettings, da.OrgId, dsIDMap)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -282,7 +302,7 @@ func (m *migration) Exec(sess *xorm.Session, mg *migrator.Migrator) error {
|
||||
folderName := getAlertFolderNameFromDashboard(&dash)
|
||||
f, ok := folderCache[folderName]
|
||||
if !ok {
|
||||
mg.Logger.Info("create a new folder for alerts that belongs to dashboard because it has custom permissions", "org", dash.OrgId, "dashboard_uid", dash.Uid, "folder", folderName)
|
||||
l.Info("create a new folder for alerts that belongs to dashboard because it has custom permissions", "folder", folderName)
|
||||
// create folder and assign the permissions of the dashboard (included default and inherited)
|
||||
f, err = m.createFolder(dash.OrgId, folderName)
|
||||
if err != nil {
|
||||
@@ -312,29 +332,20 @@ func (m *migration) Exec(sess *xorm.Session, mg *migrator.Migrator) error {
|
||||
// get folder if exists
|
||||
f, err := m.getFolder(dash, da)
|
||||
if err != nil {
|
||||
return MigrationError{
|
||||
Err: err,
|
||||
AlertId: da.Id,
|
||||
}
|
||||
}
|
||||
folder = &f
|
||||
default:
|
||||
f, ok := folderCache[GENERAL_FOLDER]
|
||||
if !ok {
|
||||
// get or create general folder
|
||||
f, err = m.getOrCreateGeneralFolder(dash.OrgId)
|
||||
// If folder does not exist then the dashboard is an orphan and we migrate the alert to the general folder.
|
||||
l.Warn("Failed to find folder for dashboard. Migrate rule to the default folder", "rule_name", da.Name, "dashboard_uid", da.DashboardUID, "missing_folder_id", dash.FolderId)
|
||||
folder, err = gf(dash, da)
|
||||
if err != nil {
|
||||
return MigrationError{
|
||||
Err: fmt.Errorf("failed to get or create general folder under organisation %d: %w", dash.OrgId, err),
|
||||
AlertId: da.Id,
|
||||
}
|
||||
return err
|
||||
}
|
||||
folderCache[GENERAL_FOLDER] = f
|
||||
} else {
|
||||
folder = &f
|
||||
}
|
||||
default:
|
||||
folder, err = gf(dash, da)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// No need to assign default permissions to general folder
|
||||
// because they are included to the query result if it's a folder with no permissions
|
||||
// https://github.com/grafana/grafana/blob/076e2ce06a6ecf15804423fcc8dca1b620a321e5/pkg/services/sqlstore/dashboard_acl.go#L109
|
||||
folder = f
|
||||
}
|
||||
|
||||
if folder.Uid == "" {
|
||||
|
||||
@@ -288,15 +288,16 @@ type Cfg struct {
|
||||
DefaultHomeDashboardPath string
|
||||
|
||||
// Auth
|
||||
LoginCookieName string
|
||||
LoginMaxInactiveLifetime time.Duration
|
||||
LoginMaxLifetime time.Duration
|
||||
TokenRotationIntervalMinutes int
|
||||
SigV4AuthEnabled bool
|
||||
SigV4VerboseLogging bool
|
||||
BasicAuthEnabled bool
|
||||
AdminUser string
|
||||
AdminPassword string
|
||||
LoginCookieName string
|
||||
LoginMaxInactiveLifetime time.Duration
|
||||
LoginMaxLifetime time.Duration
|
||||
TokenRotationIntervalMinutes int
|
||||
SigV4AuthEnabled bool
|
||||
SigV4VerboseLogging bool
|
||||
BasicAuthEnabled bool
|
||||
AdminUser string
|
||||
AdminPassword string
|
||||
OAuthAllowInsecureEmailLookup bool
|
||||
|
||||
// AWS Plugin Auth
|
||||
AWSAllowedAuthProviders []string
|
||||
@@ -1261,6 +1262,9 @@ func readAuthSettings(iniFile *ini.File, cfg *Cfg) (err error) {
|
||||
} else {
|
||||
maxLifetimeDaysVal = "30d"
|
||||
}
|
||||
|
||||
cfg.OAuthAllowInsecureEmailLookup = auth.Key("oauth_allow_insecure_email_lookup").MustBool(false)
|
||||
|
||||
maxLifetimeDurationVal := valueAsString(auth, "login_maximum_lifetime_duration", maxLifetimeDaysVal)
|
||||
cfg.LoginMaxLifetime, err = gtime.ParseDuration(maxLifetimeDurationVal)
|
||||
if err != nil {
|
||||
|
||||
@@ -5,7 +5,7 @@ import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil" //nolint:staticcheck // No need to change in v8.
|
||||
"io/ioutil" //nolint:staticcheck // No need to change in v8.
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
@@ -28,6 +28,8 @@ import (
|
||||
)
|
||||
|
||||
func TestAMConfigAccess(t *testing.T) {
|
||||
t.Skip("skip broken test")
|
||||
|
||||
_, err := tracing.InitializeTracerForTest()
|
||||
require.NoError(t, err)
|
||||
|
||||
@@ -204,7 +206,8 @@ func TestAMConfigAccess(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("when creating silence", func(t *testing.T) {
|
||||
body := `
|
||||
now := time.Now()
|
||||
body := fmt.Sprintf(`
|
||||
{
|
||||
"comment": "string",
|
||||
"createdBy": "string",
|
||||
@@ -216,9 +219,10 @@ func TestAMConfigAccess(t *testing.T) {
|
||||
"value": "string"
|
||||
}
|
||||
],
|
||||
"startsAt": "2021-03-31T13:17:04.419Z"
|
||||
"startsAt": "%s",
|
||||
"endsAt": "%s"
|
||||
}
|
||||
`
|
||||
`, now.Format(time.RFC3339), now.Add(10*time.Second).Format(time.RFC3339))
|
||||
|
||||
testCases := []testCase{
|
||||
{
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@grafana-plugins/input-datasource",
|
||||
"version": "8.5.20",
|
||||
"version": "8.5.27",
|
||||
"description": "Input Datasource",
|
||||
"private": true,
|
||||
"repository": {
|
||||
@@ -24,9 +24,9 @@
|
||||
"webpack": "5.58.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grafana/data": "8.5.20",
|
||||
"@grafana/toolkit": "8.5.20",
|
||||
"@grafana/ui": "8.5.20",
|
||||
"@grafana/data": "8.5.27",
|
||||
"@grafana/toolkit": "8.5.27",
|
||||
"@grafana/ui": "8.5.27",
|
||||
"jquery": "3.5.1",
|
||||
"react": "17.0.1",
|
||||
"react-dom": "17.0.1",
|
||||
|
||||
@@ -11,11 +11,9 @@ export interface FunctionEditorControlsProps {
|
||||
}
|
||||
|
||||
const FunctionDescription = React.lazy(async () => {
|
||||
// @ts-ignore
|
||||
const { default: rst2html } = await import(/* webpackChunkName: "rst2html" */ 'rst2html');
|
||||
return {
|
||||
default(props: { description?: string }) {
|
||||
return <div dangerouslySetInnerHTML={{ __html: rst2html(props.description ?? '') }} />;
|
||||
return <div>{props.description}</div>;
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
@@ -24,6 +24,7 @@ import {
|
||||
DataHoverEvent,
|
||||
DataFrame,
|
||||
FrameGeometrySourceMode,
|
||||
textUtil,
|
||||
} from '@grafana/data';
|
||||
import { config } from '@grafana/runtime';
|
||||
import { PanelContext, PanelContextRoot, stylesFactory } from '@grafana/ui';
|
||||
@@ -490,6 +491,10 @@ export class GeomapPanel extends Component<Props, State> {
|
||||
return Promise.reject('unknown layer: ' + options.type);
|
||||
}
|
||||
|
||||
if (options.config?.attribution) {
|
||||
options.config.attribution = textUtil.sanitizeTextPanelContent(options.config.attribution);
|
||||
}
|
||||
|
||||
const handler = await item.create(map, options, config.theme2);
|
||||
const layer = handler.init();
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# Use old Debian (this has support into 2022) in order to ensure binary compatibility with older glibc's.
|
||||
FROM debian:stretch-20210208 AS toolchain
|
||||
# Use old Debian (this has support into 2024) in order to ensure binary compatibility with older glibc's.
|
||||
FROM debian:buster-20230502 AS toolchain
|
||||
|
||||
ENV OSX_MIN=10.10 \
|
||||
CTNG=1.24.0 \
|
||||
@@ -107,10 +107,10 @@ RUN tar -xzvf dockerize-linux-amd64-v${DOCKERIZE_VERSION}.tar.gz -C /tmp/
|
||||
RUN rm dockerize-linux-amd64-v${DOCKERIZE_VERSION}.tar.gz
|
||||
|
||||
# Base image to crossbuild grafana.
|
||||
# Use old Debian (this has support into 2022) in order to ensure binary compatibility with older glibc's.
|
||||
FROM debian:stretch-20210208
|
||||
# Use old Debian (this has support into 2024) in order to ensure binary compatibility with older glibc's.
|
||||
FROM debian:buster-20230502
|
||||
|
||||
ENV GOVERSION=1.19.4 \
|
||||
ENV GOVERSION=1.19.9 \
|
||||
PATH=/usr/local/go/bin:$PATH \
|
||||
GOPATH=/go \
|
||||
NODEVERSION=16.14.0-1nodesource1 \
|
||||
|
||||
@@ -11,6 +11,8 @@ load(
|
||||
'build_image',
|
||||
'identify_runner_step',
|
||||
'publish_image',
|
||||
'publish_linux_packages_step',
|
||||
'publish_grafanacom_step',
|
||||
'lint_backend_step',
|
||||
'lint_frontend_step',
|
||||
'codespell_step',
|
||||
@@ -42,7 +44,8 @@ load(
|
||||
'validate_scuemata_step',
|
||||
'ensure_cuetsified_step',
|
||||
'publish_images_step',
|
||||
'trigger_oss'
|
||||
'trigger_oss',
|
||||
'compile_build_cmd',
|
||||
)
|
||||
|
||||
load(
|
||||
@@ -404,12 +407,18 @@ def publish_packages_pipeline():
|
||||
}
|
||||
oss_steps = [
|
||||
download_grabpl_step(),
|
||||
store_packages_step(edition='oss', ver_mode='release'),
|
||||
compile_build_cmd(),
|
||||
publish_linux_packages_step(edition = "oss", package_manager = "deb"),
|
||||
publish_linux_packages_step(edition = "oss", package_manager = "rpm"),
|
||||
publish_grafanacom_step(edition = "oss", ver_mode = "release"),
|
||||
]
|
||||
|
||||
enterprise_steps = [
|
||||
download_grabpl_step(),
|
||||
store_packages_step(edition='enterprise', ver_mode='release'),
|
||||
compile_build_cmd(),
|
||||
publish_linux_packages_step(edition = "enterprise", package_manager = "deb"),
|
||||
publish_linux_packages_step(edition = "enterprise", package_manager = "rpm"),
|
||||
publish_grafanacom_step(edition = "enterprise", ver_mode = "release"),
|
||||
]
|
||||
deps = [
|
||||
'publish-artifacts-public',
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
load('scripts/drone/vault.star', 'from_secret', 'github_token', 'pull_secret', 'drone_token', 'prerelease_bucket')
|
||||
|
||||
grabpl_version = 'v2.9.50'
|
||||
build_image = 'grafana/build-container:1.5.5-go1.19.4'
|
||||
grabpl_version = 'v2.9.50-fixfpm-4'
|
||||
build_image = 'grafana/build-container:1.5.5-go1.19.9'
|
||||
publish_image = 'grafana/grafana-ci-deploy:1.3.1'
|
||||
deploy_docker_image = 'us.gcr.io/kubernetes-dev/drone/plugins/deploy-image'
|
||||
alpine_image = 'alpine:3.15'
|
||||
curl_image = 'byrnedo/alpine-curl:0.1.8'
|
||||
windows_image = 'mcr.microsoft.com/windows:1809'
|
||||
wix_image = 'grafana/ci-wix:0.1.1'
|
||||
go_image = "golang:1.19.9"
|
||||
|
||||
disable_tests = False
|
||||
trigger_oss = {
|
||||
@@ -117,14 +118,38 @@ def init_enterprise_step(ver_mode):
|
||||
source_commit = ' ${DRONE_TAG}'
|
||||
environment = {
|
||||
'GITHUB_TOKEN': from_secret(github_token),
|
||||
'GRAFANA_BRANCH': 'v8.5.x',
|
||||
}
|
||||
token = "--github-token $${GITHUB_TOKEN}"
|
||||
elif ver_mode == 'release-branch':
|
||||
environment = {}
|
||||
environment = {
|
||||
'GRAFANA_BRANCH': 'v8.5.x',
|
||||
}
|
||||
token = ""
|
||||
else:
|
||||
environment = {}
|
||||
environment = {
|
||||
'GRAFANA_BRANCH': 'v8.5.x',
|
||||
}
|
||||
token = ""
|
||||
commands = [
|
||||
'mv bin/grabpl /tmp/',
|
||||
'rmdir bin',
|
||||
'mv grafana-enterprise /tmp/',
|
||||
]
|
||||
|
||||
if ver_mode == 'release':
|
||||
commands += [
|
||||
'export DRONE_TARGET_BRANCH=$${DRONE_TAG}',
|
||||
'export DRONE_SOURCE_BRANCH=v8.5.x',
|
||||
]
|
||||
|
||||
commands += [
|
||||
'/tmp/grabpl init-enterprise {} /tmp/grafana-enterprise{}'.format(token, source_commit),
|
||||
'mv /tmp/grafana-enterprise/deployment_tools_config.json deployment_tools_config.json',
|
||||
'mkdir bin',
|
||||
'mv /tmp/grabpl bin/'
|
||||
]
|
||||
|
||||
return {
|
||||
'name': 'init-enterprise',
|
||||
'image': build_image,
|
||||
@@ -132,15 +157,7 @@ def init_enterprise_step(ver_mode):
|
||||
'clone-enterprise',
|
||||
],
|
||||
'environment': environment,
|
||||
'commands': [
|
||||
'mv bin/grabpl /tmp/',
|
||||
'rmdir bin',
|
||||
'mv grafana-enterprise /tmp/',
|
||||
'/tmp/grabpl init-enterprise {} /tmp/grafana-enterprise{}'.format(token, source_commit),
|
||||
'mv /tmp/grafana-enterprise/deployment_tools_config.json deployment_tools_config.json',
|
||||
'mkdir bin',
|
||||
'mv /tmp/grabpl bin/'
|
||||
],
|
||||
'commands': commands,
|
||||
}
|
||||
|
||||
|
||||
@@ -649,9 +666,9 @@ def package_step(edition, ver_mode, include_enterprise2=False, variants=None):
|
||||
sign_args = ' --sign'
|
||||
env = {
|
||||
'GRAFANA_API_KEY': from_secret('grafana_api_key'),
|
||||
'GPG_PRIV_KEY': from_secret('gpg_priv_key'),
|
||||
'GPG_PUB_KEY': from_secret('gpg_pub_key'),
|
||||
'GPG_KEY_PASSWORD': from_secret('gpg_key_password'),
|
||||
'GPG_PRIV_KEY': from_secret('packages_gpg_private_key'),
|
||||
'GPG_PUB_KEY': from_secret('packages_gpg_public_key'),
|
||||
'GPG_KEY_PASSWORD': from_secret('packages_gpg_passphrase'),
|
||||
}
|
||||
test_args = ''
|
||||
else:
|
||||
@@ -996,15 +1013,66 @@ def store_packages_step(edition, ver_mode):
|
||||
'environment': {
|
||||
'GRAFANA_COM_API_KEY': from_secret('grafana_api_key'),
|
||||
'GCP_KEY': from_secret('gcp_key'),
|
||||
'GPG_PRIV_KEY': from_secret('gpg_priv_key'),
|
||||
'GPG_PUB_KEY': from_secret('gpg_pub_key'),
|
||||
'GPG_KEY_PASSWORD': from_secret('gpg_key_password'),
|
||||
'GPG_PRIV_KEY': from_secret('packages_gpg_private_key'),
|
||||
'GPG_PUB_KEY': from_secret('packages_gpg_public_key'),
|
||||
'GPG_KEY_PASSWORD': from_secret('packages_gpg_passphrase'),
|
||||
},
|
||||
'commands': [
|
||||
cmd,
|
||||
],
|
||||
}
|
||||
|
||||
def publish_grafanacom_step(edition, ver_mode):
|
||||
if ver_mode == 'release':
|
||||
cmd = './bin/build publish grafana-com --edition {} ${{DRONE_TAG}}'.format(
|
||||
edition,
|
||||
)
|
||||
elif ver_mode == 'main':
|
||||
build_no = '${DRONE_BUILD_NUMBER}'
|
||||
cmd = './bin/build publish grafana-com --edition {} --build-id {}'.format(
|
||||
edition, build_no,
|
||||
)
|
||||
else:
|
||||
fail('Unexpected version mode {}'.format(ver_mode))
|
||||
|
||||
return {
|
||||
'name': 'publish-grafanacom-{}'.format(edition),
|
||||
'image': publish_image,
|
||||
'depends_on': [
|
||||
"publish-linux-packages-deb",
|
||||
"publish-linux-packages-rpm",
|
||||
],
|
||||
'environment': {
|
||||
'GRAFANA_COM_API_KEY': from_secret('grafana_api_key'),
|
||||
'GCP_KEY': from_secret('gcp_key'),
|
||||
},
|
||||
'commands': [
|
||||
cmd,
|
||||
],
|
||||
}
|
||||
|
||||
def publish_linux_packages_step(edition, package_manager='deb'):
|
||||
return {
|
||||
'name': 'publish-linux-packages-{}'.format(package_manager),
|
||||
# See https://github.com/grafana/deployment_tools/blob/master/docker/package-publish/README.md for docs on that image
|
||||
'image': 'us.gcr.io/kubernetes-dev/package-publish:latest',
|
||||
'depends_on': [
|
||||
'grabpl'
|
||||
],
|
||||
'privileged': True,
|
||||
'failure': 'ignore', # While we're testing it
|
||||
'settings': {
|
||||
'access_key_id': from_secret('packages_access_key_id'),
|
||||
'secret_access_key': from_secret('packages_secret_access_key'),
|
||||
'service_account_json': from_secret('packages_service_account'),
|
||||
'target_bucket': 'grafana-packages',
|
||||
'deb_distribution': 'stable',
|
||||
'gpg_passphrase': from_secret('packages_gpg_passphrase'),
|
||||
'gpg_public_key': from_secret('packages_gpg_public_key'),
|
||||
'gpg_private_key': from_secret('packages_gpg_private_key'),
|
||||
'package_path': 'gs://grafana-prerelease/artifacts/downloads/*${{DRONE_TAG}}/{}/**.{}'.format(edition, package_manager)
|
||||
}
|
||||
}
|
||||
|
||||
def get_windows_steps(edition, ver_mode):
|
||||
init_cmds = []
|
||||
@@ -1074,7 +1142,8 @@ def get_windows_steps(edition, ver_mode):
|
||||
'environment': {
|
||||
'GCP_KEY': from_secret('gcp_key'),
|
||||
'PRERELEASE_BUCKET': from_secret(prerelease_bucket),
|
||||
'GITHUB_TOKEN': from_secret('github_token')
|
||||
'GITHUB_TOKEN': from_secret('github_token'),
|
||||
'GRAFANA_BRANCH': 'v8.5.x',
|
||||
},
|
||||
'commands': installer_commands,
|
||||
})
|
||||
@@ -1116,13 +1185,20 @@ def get_windows_steps(edition, ver_mode):
|
||||
'rm -r -force grafana-enterprise',
|
||||
'cp grabpl.exe C:\\App\\grabpl.exe',
|
||||
'rm -force grabpl.exe',
|
||||
'C:\\App\\grabpl.exe init-enterprise --github-token $$env:GITHUB_TOKEN C:\\App\\grafana-enterprise',
|
||||
'set DRONE_SOURCE_BRANCH=v8.5.x',
|
||||
'C:\\App\\grabpl.exe init-enterprise --github-token $$env:GITHUB_TOKEN C:\\App\\grafana-enterprise {}'.format(committish),
|
||||
'cp C:\\App\\grabpl.exe grabpl.exe',
|
||||
])
|
||||
if 'environment' in steps[1]:
|
||||
steps[1]['environment'] + {'GITHUB_TOKEN': from_secret(github_token)}
|
||||
steps[1]['environment'] + {
|
||||
'GITHUB_TOKEN': from_secret(github_token),
|
||||
'GRAFANA_BRANCH': 'v8.5.x',
|
||||
}
|
||||
else:
|
||||
steps[1]['environment'] = {'GITHUB_TOKEN': from_secret(github_token)}
|
||||
steps[1]['environment'] = {
|
||||
'GITHUB_TOKEN': from_secret(github_token),
|
||||
'GRAFANA_BRANCH': 'v8.5.x',
|
||||
}
|
||||
|
||||
return steps
|
||||
|
||||
@@ -1166,3 +1242,21 @@ def end_to_end_tests_deps(edition):
|
||||
'end-to-end-tests-smoke-tests-suite' + enterprise2_suffix(edition),
|
||||
'end-to-end-tests-various-suite' + enterprise2_suffix(edition),
|
||||
]
|
||||
|
||||
def compile_build_cmd(edition = "oss"):
|
||||
dependencies = []
|
||||
if edition in ("enterprise", "enterprise2"):
|
||||
dependencies = [
|
||||
"init-enterprise",
|
||||
]
|
||||
return {
|
||||
"name": "compile-build-cmd",
|
||||
"image": go_image,
|
||||
"commands": [
|
||||
"go build -o ./bin/build -ldflags '-extldflags -static' ./pkg/build/cmd",
|
||||
],
|
||||
"depends_on": dependencies,
|
||||
"environment": {
|
||||
"CGO_ENABLED": 0,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -47,6 +47,7 @@ def pipeline(
|
||||
},
|
||||
}],
|
||||
'depends_on': depends_on,
|
||||
'image_pull_secrets': [pull_secret],
|
||||
}
|
||||
if environment:
|
||||
pipeline.update({
|
||||
@@ -57,7 +58,6 @@ def pipeline(
|
||||
pipeline.update(platform_conf)
|
||||
|
||||
if edition in ('enterprise', 'enterprise2'):
|
||||
pipeline['image_pull_secrets'] = [pull_secret]
|
||||
# We have a custom clone step for enterprise
|
||||
pipeline['clone'] = {
|
||||
'disable': True,
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
pull_secret = 'dockerconfigjson'
|
||||
github_token = 'github_token'
|
||||
drone_token = 'drone_token'
|
||||
prerelease_bucket = 'prerelease_bucket'
|
||||
gcp_upload_artifacts_key = 'gcp_upload_artifacts_key'
|
||||
azure_sp_app_id = 'azure_sp_app_id'
|
||||
azure_sp_app_pw = 'azure_sp_app_pw'
|
||||
azure_tenant = 'azure_tenant'
|
||||
github_token = 'github_token'
|
||||
|
||||
|
||||
def from_secret(secret):
|
||||
return {
|
||||
'from_secret': secret
|
||||
}
|
||||
return {'from_secret': secret}
|
||||
|
||||
|
||||
def vault_secret(name, path, key):
|
||||
return {
|
||||
@@ -16,14 +19,80 @@ def vault_secret(name, path, key):
|
||||
'get': {
|
||||
'path': path,
|
||||
'name': key,
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def secrets():
|
||||
return [
|
||||
vault_secret(pull_secret, 'secret/data/common/gcr', '.dockerconfigjson'),
|
||||
vault_secret(github_token, 'infra/data/ci/github/grafanabot', 'pat'),
|
||||
vault_secret(drone_token, 'infra/data/ci/drone', 'machine-user-token'),
|
||||
vault_secret(prerelease_bucket, 'infra/data/ci/grafana/prerelease', 'bucket'),
|
||||
vault_secret(gcp_upload_artifacts_key, 'infra/data/ci/grafana/releng/artifacts-uploader-service-account', 'credentials.json'),
|
||||
vault_secret(
|
||||
gcp_upload_artifacts_key,
|
||||
'infra/data/ci/grafana/releng/artifacts-uploader-service-account',
|
||||
'credentials.json',
|
||||
),
|
||||
vault_secret(
|
||||
azure_sp_app_id,
|
||||
'infra/data/ci/datasources/cpp-azure-resourcemanager-credentials',
|
||||
'application_id',
|
||||
),
|
||||
vault_secret(
|
||||
azure_sp_app_pw,
|
||||
'infra/data/ci/datasources/cpp-azure-resourcemanager-credentials',
|
||||
'application_secret',
|
||||
),
|
||||
vault_secret(
|
||||
azure_tenant,
|
||||
'infra/data/ci/datasources/cpp-azure-resourcemanager-credentials',
|
||||
'tenant_id',
|
||||
),
|
||||
# Package publishing
|
||||
vault_secret(
|
||||
'packages_gpg_public_key',
|
||||
'infra/data/ci/packages-publish/gpg',
|
||||
'public-key-b64',
|
||||
),
|
||||
vault_secret(
|
||||
'packages_gpg_private_key',
|
||||
'infra/data/ci/packages-publish/gpg',
|
||||
'private-key-b64',
|
||||
),
|
||||
vault_secret(
|
||||
'packages_gpg_passphrase',
|
||||
'infra/data/ci/packages-publish/gpg',
|
||||
'passphrase',
|
||||
),
|
||||
vault_secret(
|
||||
'packages_service_account',
|
||||
'infra/data/ci/packages-publish/service-account',
|
||||
'credentials.json',
|
||||
),
|
||||
vault_secret(
|
||||
'packages_access_key_id',
|
||||
'infra/data/ci/packages-publish/bucket-credentials',
|
||||
'AccessID',
|
||||
),
|
||||
vault_secret(
|
||||
'packages_secret_access_key',
|
||||
'infra/data/ci/packages-publish/bucket-credentials',
|
||||
'Secret',
|
||||
),
|
||||
vault_secret(
|
||||
'aws_region',
|
||||
'secret/data/common/aws-marketplace',
|
||||
'aws_region',
|
||||
),
|
||||
vault_secret(
|
||||
'aws_access_key_id',
|
||||
'secret/data/common/aws-marketplace',
|
||||
'aws_access_key_id',
|
||||
),
|
||||
vault_secret(
|
||||
'aws_secret_access_key',
|
||||
'secret/data/common/aws-marketplace',
|
||||
'aws_secret_access_key',
|
||||
),
|
||||
]
|
||||
|
||||
40
yarn.lock
40
yarn.lock
@@ -3949,9 +3949,9 @@ __metadata:
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@grafana-plugins/input-datasource@workspace:plugins-bundled/internal/input-datasource"
|
||||
dependencies:
|
||||
"@grafana/data": 8.5.20
|
||||
"@grafana/toolkit": 8.5.20
|
||||
"@grafana/ui": 8.5.20
|
||||
"@grafana/data": 8.5.27
|
||||
"@grafana/toolkit": 8.5.27
|
||||
"@grafana/ui": 8.5.27
|
||||
"@types/jest": 26.0.15
|
||||
"@types/lodash": 4.14.149
|
||||
"@types/react": 17.0.30
|
||||
@@ -3992,12 +3992,12 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@grafana/data@8.5.20, @grafana/data@workspace:*, @grafana/data@workspace:packages/grafana-data":
|
||||
"@grafana/data@8.5.27, @grafana/data@workspace:*, @grafana/data@workspace:packages/grafana-data":
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@grafana/data@workspace:packages/grafana-data"
|
||||
dependencies:
|
||||
"@braintree/sanitize-url": 6.0.0
|
||||
"@grafana/schema": 8.5.20
|
||||
"@grafana/schema": 8.5.27
|
||||
"@grafana/tsconfig": ^1.2.0-rc1
|
||||
"@rollup/plugin-commonjs": 21.0.2
|
||||
"@rollup/plugin-json": 4.1.0
|
||||
@@ -4050,7 +4050,7 @@ __metadata:
|
||||
languageName: unknown
|
||||
linkType: soft
|
||||
|
||||
"@grafana/e2e-selectors@8.5.20, @grafana/e2e-selectors@workspace:*, @grafana/e2e-selectors@workspace:packages/grafana-e2e-selectors":
|
||||
"@grafana/e2e-selectors@8.5.27, @grafana/e2e-selectors@workspace:*, @grafana/e2e-selectors@workspace:packages/grafana-e2e-selectors":
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@grafana/e2e-selectors@workspace:packages/grafana-e2e-selectors"
|
||||
dependencies:
|
||||
@@ -4074,7 +4074,7 @@ __metadata:
|
||||
"@babel/core": 7.17.8
|
||||
"@babel/preset-env": 7.16.11
|
||||
"@cypress/webpack-preprocessor": 5.11.1
|
||||
"@grafana/e2e-selectors": 8.5.20
|
||||
"@grafana/e2e-selectors": 8.5.27
|
||||
"@grafana/tsconfig": ^1.2.0-rc1
|
||||
"@mochajs/json-file-reporter": ^1.2.0
|
||||
"@rollup/plugin-commonjs": 21.0.2
|
||||
@@ -4181,10 +4181,10 @@ __metadata:
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@grafana/runtime@workspace:packages/grafana-runtime"
|
||||
dependencies:
|
||||
"@grafana/data": 8.5.20
|
||||
"@grafana/e2e-selectors": 8.5.20
|
||||
"@grafana/data": 8.5.27
|
||||
"@grafana/e2e-selectors": 8.5.27
|
||||
"@grafana/tsconfig": ^1.2.0-rc1
|
||||
"@grafana/ui": 8.5.20
|
||||
"@grafana/ui": 8.5.27
|
||||
"@rollup/plugin-commonjs": 21.0.2
|
||||
"@rollup/plugin-node-resolve": 13.1.3
|
||||
"@sentry/browser": 6.19.1
|
||||
@@ -4213,7 +4213,7 @@ __metadata:
|
||||
languageName: unknown
|
||||
linkType: soft
|
||||
|
||||
"@grafana/schema@8.5.20, @grafana/schema@workspace:*, @grafana/schema@workspace:packages/grafana-schema":
|
||||
"@grafana/schema@8.5.27, @grafana/schema@workspace:*, @grafana/schema@workspace:packages/grafana-schema":
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@grafana/schema@workspace:packages/grafana-schema"
|
||||
dependencies:
|
||||
@@ -4231,16 +4231,16 @@ __metadata:
|
||||
languageName: unknown
|
||||
linkType: soft
|
||||
|
||||
"@grafana/toolkit@8.5.20, @grafana/toolkit@workspace:*, @grafana/toolkit@workspace:packages/grafana-toolkit":
|
||||
"@grafana/toolkit@8.5.27, @grafana/toolkit@workspace:*, @grafana/toolkit@workspace:packages/grafana-toolkit":
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@grafana/toolkit@workspace:packages/grafana-toolkit"
|
||||
dependencies:
|
||||
"@babel/core": 7.13.14
|
||||
"@babel/preset-env": 7.13.12
|
||||
"@grafana/data": 8.5.20
|
||||
"@grafana/data": 8.5.27
|
||||
"@grafana/eslint-config": 2.5.2
|
||||
"@grafana/tsconfig": ^1.2.0-rc1
|
||||
"@grafana/ui": 8.5.20
|
||||
"@grafana/ui": 8.5.27
|
||||
"@jest/core": 26.6.3
|
||||
"@rushstack/eslint-patch": 1.0.6
|
||||
"@types/command-exists": ^1.2.0
|
||||
@@ -4324,7 +4324,7 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@grafana/ui@8.5.20, @grafana/ui@workspace:*, @grafana/ui@workspace:packages/grafana-ui":
|
||||
"@grafana/ui@8.5.27, @grafana/ui@workspace:*, @grafana/ui@workspace:packages/grafana-ui":
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@grafana/ui@workspace:packages/grafana-ui"
|
||||
dependencies:
|
||||
@@ -4332,9 +4332,9 @@ __metadata:
|
||||
"@emotion/css": 11.7.1
|
||||
"@emotion/react": 11.8.2
|
||||
"@grafana/aws-sdk": 0.0.35
|
||||
"@grafana/data": 8.5.20
|
||||
"@grafana/e2e-selectors": 8.5.20
|
||||
"@grafana/schema": 8.5.20
|
||||
"@grafana/data": 8.5.27
|
||||
"@grafana/e2e-selectors": 8.5.27
|
||||
"@grafana/schema": 8.5.27
|
||||
"@grafana/tsconfig": ^1.2.0-rc1
|
||||
"@mdx-js/react": 1.6.22
|
||||
"@monaco-editor/react": 4.3.1
|
||||
@@ -4570,9 +4570,9 @@ __metadata:
|
||||
resolution: "@jaegertracing/jaeger-ui-components@workspace:packages/jaeger-ui-components"
|
||||
dependencies:
|
||||
"@emotion/css": 11.7.1
|
||||
"@grafana/data": 8.5.20
|
||||
"@grafana/data": 8.5.27
|
||||
"@grafana/tsconfig": ^1.2.0-rc1
|
||||
"@grafana/ui": 8.5.20
|
||||
"@grafana/ui": 8.5.27
|
||||
"@types/classnames": ^2.2.7
|
||||
"@types/deep-freeze": ^0.1.1
|
||||
"@types/hoist-non-react-statics": ^3.3.1
|
||||
|
||||
Reference in New Issue
Block a user