Compare commits
44 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 7e1ae90334 | |||
| 87fa31e57b | |||
| 26359899d5 | |||
| b841dfa2bc | |||
| e675b6a032 | |||
| 271f268d68 | |||
| b059912f7d | |||
| 24df78fb4e | |||
| dcbbf64aa0 | |||
| ce8c46fb91 | |||
| 0f8f5c86ef | |||
| 111c53f9fd | |||
| cc8928a6a7 | |||
| 46956a2997 | |||
| af460952d5 | |||
| 3a8bff55cd | |||
| 7223130454 | |||
| d53bf3d740 | |||
| 6a046831fc | |||
| 9b4414de27 | |||
| c7b629d3bc | |||
| bbd19baaaf | |||
| 5a2ab9b8b0 | |||
| d1bd29aa3b | |||
| 7d205c7dee | |||
| e5a98c3c43 | |||
| 50403b38d6 | |||
| f6570f8123 | |||
| c5cf9ff393 | |||
| 9738c198b9 | |||
| 06bf567e1c | |||
| c68d3a2ffa | |||
| 59cc00b07e | |||
| 8ce2c2d3eb | |||
| 987573a17c | |||
| 49f78c15e8 | |||
| 76340a9741 | |||
| b15acdf1f2 | |||
| ca8402fbda | |||
| abb44794fe | |||
| c228eaa99d | |||
| f41cc1c0d6 | |||
| b557d71c9a | |||
| e404352a38 |
@@ -13,17 +13,29 @@ on:
|
||||
required: false
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
id-token: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
bump-version:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Grafana
|
||||
uses: actions/checkout@v4
|
||||
- uses: grafana/shared-workflows/actions/get-vault-secrets@main
|
||||
with:
|
||||
persist-credentials: false
|
||||
repo_secrets: |
|
||||
GRAFANA_DELIVERY_BOT_APP_PEM=delivery-bot-app:PRIVATE_KEY
|
||||
- name: Generate token
|
||||
id: generate_token
|
||||
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
|
||||
with:
|
||||
app_id: ${{ vars.DELIVERY_BOT_APP_ID }}
|
||||
private_key: ${{ env.GRAFANA_DELIVERY_BOT_APP_PEM }}
|
||||
repositories: '["grafana"]'
|
||||
permissions: '{"contents": "write", "pull_requests": "write", "workflows": "write"}'
|
||||
- name: Checkout Grafana
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
- name: Update package.json versions
|
||||
uses: ./pkg/build/actions/bump-version
|
||||
with:
|
||||
@@ -35,10 +47,10 @@ jobs:
|
||||
DRY_RUN: ${{ inputs.dry_run }}
|
||||
REF_NAME: ${{ github.ref_name }}
|
||||
RUN_ID: ${{ github.run_id }}
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
run: |
|
||||
git config --local user.name "github-actions[bot]"
|
||||
git config --local user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git config --local user.name "grafana-delivery-bot[bot]"
|
||||
git config --local user.email "grafana-delivery-bot[bot]@users.noreply.github.com"
|
||||
git config --local --add --bool push.autoSetupRemote true
|
||||
git checkout -b "bump-version/${RUN_ID}/${VERSION}"
|
||||
git add .
|
||||
|
||||
@@ -33,9 +33,13 @@ on:
|
||||
type: string
|
||||
required: false
|
||||
default: github-prerelease-writer@grafanalabs-workload-identity.iam.gserviceaccount.com
|
||||
runs-on:
|
||||
type: string
|
||||
required: false
|
||||
default: github-hosted-ubuntu-x64-small
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: github-hosted-ubuntu-x64-small
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
name: Publish
|
||||
permissions:
|
||||
id-token: write
|
||||
|
||||
@@ -56,7 +56,7 @@ jobs:
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Set up version (Release Branches)
|
||||
@@ -140,7 +140,7 @@ jobs:
|
||||
# The downside to this is that the frontend will be built for each one when it could be reused for all of them.
|
||||
# This could be a future improvement.
|
||||
include:
|
||||
- name: linux-amd64
|
||||
- name: linux-amd64 # publish-npm relies on this step building npm packages
|
||||
artifacts: targz:grafana:linux/amd64,deb:grafana:linux/amd64,rpm:grafana:linux/amd64,docker:grafana:linux/amd64,docker:grafana:linux/amd64:ubuntu,npm:grafana,storybook
|
||||
verify: true
|
||||
- name: linux-arm64
|
||||
@@ -169,7 +169,7 @@ jobs:
|
||||
verify: true
|
||||
steps:
|
||||
- uses: grafana/shared-workflows/actions/dockerhub-login@dockerhub-login/v1.0.2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Set up QEMU
|
||||
@@ -197,6 +197,7 @@ jobs:
|
||||
name: artifacts-${{ matrix.name }}
|
||||
path: ${{ steps.build.outputs.dist-dir }}
|
||||
retention-days: 1
|
||||
|
||||
publish-artifacts:
|
||||
name: Upload artifacts
|
||||
uses: grafana/grafana/.github/workflows/publish-artifact.yml@main
|
||||
@@ -211,6 +212,7 @@ jobs:
|
||||
run-id: ${{ github.run_id }}
|
||||
bucket-path: ${{ needs.setup.outputs.version }}_${{ github.run_id }}
|
||||
environment: prod
|
||||
|
||||
publish-dockerhub:
|
||||
if: github.ref_name == 'main'
|
||||
permissions:
|
||||
@@ -268,3 +270,68 @@ jobs:
|
||||
docker manifest push grafana/grafana:main-ubuntu
|
||||
docker manifest push "grafana/grafana-dev:${VERSION}"
|
||||
docker manifest push "grafana/grafana-dev:${VERSION}-ubuntu"
|
||||
|
||||
publish-npm-canaries:
|
||||
if: github.ref_name == 'main'
|
||||
name: Publish NPM canaries
|
||||
uses: ./.github/workflows/release-npm.yml
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
needs:
|
||||
- setup
|
||||
- build
|
||||
with:
|
||||
grafana_commit: ${{ needs.setup.outputs.grafana-commit }}
|
||||
version: ${{ needs.setup.outputs.version }}
|
||||
build_id: ${{ github.run_id }}
|
||||
version_type: "canary"
|
||||
|
||||
# notify-pr creates (or updates) a comment in a pull request to link to this workflow where the release artifacts are
|
||||
# being built.
|
||||
notify-pr:
|
||||
runs-on: ubuntu-x64-small
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
needs:
|
||||
- setup
|
||||
steps:
|
||||
- id: vault-secrets
|
||||
uses: grafana/shared-workflows/actions/get-vault-secrets@main
|
||||
with:
|
||||
repo_secrets: |
|
||||
GRAFANA_DELIVERY_BOT_APP_PEM=delivery-bot-app:PRIVATE_KEY
|
||||
- name: Generate token
|
||||
id: generate_token
|
||||
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
|
||||
with:
|
||||
app_id: ${{ vars.DELIVERY_BOT_APP_ID }}
|
||||
private_key: ${{ env.GRAFANA_DELIVERY_BOT_APP_PEM }}
|
||||
repositories: '["grafana"]'
|
||||
permissions: '{"issues": "write", "pull_requests": "write", "contents": "read"}'
|
||||
- name: Find PR
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
GRAFANA_COMMIT: ${{ needs.setup.outputs.grafana-commit }}
|
||||
run: echo "ISSUE_NUMBER=$(gh api "/repos/grafana/grafana/commits/${GRAFANA_COMMIT}/pulls" | jq -r '.[0].number')" >> "$GITHUB_ENV"
|
||||
- name: Find Comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3
|
||||
id: fc
|
||||
with:
|
||||
issue-number: ${{ env.ISSUE_NUMBER }}
|
||||
comment-author: 'grafana-delivery-bot[bot]'
|
||||
body-includes: GitHub Actions Build
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
- name: Create or update comment
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
comment-id: ${{ steps.fc.outputs.comment-id }}
|
||||
issue-number: ${{ env.ISSUE_NUMBER }}
|
||||
body: |
|
||||
:rocket: Your submission is now being built and packaged.
|
||||
|
||||
- [GitHub Actions Build](https://github.com/grafana/grafana/actions/runs/${{ github.run_id }})
|
||||
- Version: ${{ needs.setup.outputs.version }}
|
||||
edit-mode: replace
|
||||
|
||||
@@ -0,0 +1,147 @@
|
||||
name: Release NPM packages
|
||||
run-name: Publish NPM ${{ inputs.version_type }} ${{ inputs.version }}
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
grafana_commit:
|
||||
description: 'Grafana commit SHA to build against'
|
||||
required: true
|
||||
type: string
|
||||
version:
|
||||
description: 'Version to publish as'
|
||||
required: true
|
||||
type: string
|
||||
build_id:
|
||||
description: 'Run ID from the original release-build workflow'
|
||||
required: true
|
||||
type: string
|
||||
version_type:
|
||||
description: 'Version type (canary, nightly, stable)'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
grafana_commit:
|
||||
description: 'Grafana commit SHA to build against'
|
||||
required: true
|
||||
version:
|
||||
description: 'Version to publish as'
|
||||
required: true
|
||||
build_id:
|
||||
description: 'Run ID from the original release-build workflow'
|
||||
required: true
|
||||
version_type:
|
||||
description: 'Version type (canary, nightly, stable)'
|
||||
required: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
# If called with version_type 'canary' or 'stable', build + publish to NPM
|
||||
# If called with version_type 'nightly', just tag the given version with nightly tag. It was already published by the canary build.
|
||||
|
||||
publish:
|
||||
name: Publish NPM packages
|
||||
runs-on: github-hosted-ubuntu-x64-small
|
||||
if: inputs.version_type == 'canary' || inputs.version_type == 'stable'
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
steps:
|
||||
- name: Info
|
||||
env:
|
||||
GITHUB_REF: ${{ github.ref }}
|
||||
GRAFANA_COMMIT: ${{ inputs.grafana_commit }}
|
||||
run: |
|
||||
echo "GRAFANA_COMMIT: $GRAFANA_COMMIT"
|
||||
echo "github.ref: $GITHUB_REF"
|
||||
|
||||
- name: Checkout workflow ref
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
fetch-depth: 100
|
||||
fetch-tags: false
|
||||
|
||||
# this will fail with "{commit} is not a valid commit" if the commit is valid but
|
||||
# not in the last 100 commits.
|
||||
- name: Verify commit is in workflow HEAD
|
||||
env:
|
||||
GIT_COMMIT: ${{ inputs.grafana_commit }}
|
||||
run: ./.github/workflows/scripts/validate-commit-in-head.sh
|
||||
shell: bash
|
||||
|
||||
- name: Map version type to NPM tag
|
||||
id: npm-tag
|
||||
env:
|
||||
VERSION: ${{ inputs.version }}
|
||||
VERSION_TYPE: ${{ inputs.version_type }}
|
||||
REFERENCE_PKG: "@grafana/runtime"
|
||||
run: |
|
||||
TAG=$(./.github/workflows/scripts/determine-npm-tag.sh)
|
||||
echo "NPM_TAG=$TAG" >> "$GITHUB_OUTPUT"
|
||||
shell: bash
|
||||
|
||||
- name: Checkout build commit
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: ${{ inputs.grafana_commit }}
|
||||
|
||||
- name: Setup Node
|
||||
uses: ./.github/actions/setup-node
|
||||
|
||||
# Trusted Publishing is only available in npm v11.5.1 and later
|
||||
- name: Update npm
|
||||
run: npm install -g npm@^11.5.1
|
||||
|
||||
- name: Install dependencies
|
||||
run: yarn install --immutable
|
||||
|
||||
- name: Typecheck packages
|
||||
run: yarn run packages:typecheck
|
||||
|
||||
- name: Version, build, and pack packages
|
||||
env:
|
||||
VERSION: ${{ inputs.version }}
|
||||
run: |
|
||||
yarn run packages:build
|
||||
yarn lerna version "$VERSION" \
|
||||
--exact \
|
||||
--no-git-tag-version \
|
||||
--no-push \
|
||||
--force-publish \
|
||||
--yes
|
||||
yarn run packages:pack
|
||||
|
||||
- name: Debug packed files
|
||||
run: tree -a ./npm-artifacts
|
||||
|
||||
- name: Validate packages
|
||||
run: ./scripts/validate-npm-packages.sh
|
||||
|
||||
- name: Debug OIDC Claims
|
||||
uses: github/actions-oidc-debugger@2e9ba5d3f4bebaad1f91a2cede055115738b7ae8
|
||||
with:
|
||||
audience: '${{ github.server_url }}/${{ github.repository_owner }}'
|
||||
|
||||
- name: Publish packages
|
||||
env:
|
||||
NPM_TAG: ${{ steps.npm-tag.outputs.NPM_TAG }}
|
||||
run: ./scripts/publish-npm-packages.sh --dist-tag "$NPM_TAG" --registry 'https://registry.npmjs.org/'
|
||||
|
||||
# TODO: finish this step
|
||||
tag-nightly:
|
||||
name: Tag nightly release
|
||||
runs-on: github-hosted-ubuntu-x64-small
|
||||
if: inputs.version_type == 'nightly'
|
||||
|
||||
steps:
|
||||
- name: Checkout workflow ref
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# TODO: tag the given release with nightly
|
||||
|
||||
+66
@@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
fail() { echo "Error: $*" >&2; exit 1; }
|
||||
|
||||
# Ensure required variables are set
|
||||
if [[ -z "${REFERENCE_PKG}" || -z "${VERSION_TYPE}" || -z "${VERSION}" ]]; then
|
||||
fail "Missing required environment variables: REFERENCE_PKG, VERSION_TYPE, VERSION"
|
||||
fi
|
||||
|
||||
semver_cmp () {
|
||||
IFS='.' read -r -a arr_a <<< "$1"
|
||||
IFS='.' read -r -a arr_b <<< "$2"
|
||||
|
||||
for i in 0 1 2; do
|
||||
local aa=${arr_a[i]:-0}
|
||||
local bb=${arr_b[i]:-0}
|
||||
# shellcheck disable=SC2004
|
||||
if (( 10#$aa > 10#$bb )); then echo gt; return 0; fi
|
||||
if (( 10#$aa < 10#$bb )); then echo lt; return 0; fi
|
||||
done
|
||||
|
||||
echo "eq"
|
||||
}
|
||||
|
||||
|
||||
STABLE_REGEX='^([0-9]+)\.([0-9]+)\.([0-9]+)$' # x.y.z
|
||||
PRE_REGEX='^([0-9]+)\.([0-9]+)\.([0-9]+)-([0-9]+)$' # x.y.z-123456
|
||||
|
||||
# Validate that the VERSION matches VERSION_TYPE
|
||||
# - stable must be x.y.z
|
||||
# - nightly/canary must be x.y.z-123456
|
||||
case "$VERSION_TYPE" in
|
||||
stable)
|
||||
[[ $VERSION =~ $STABLE_REGEX ]] || fail "For 'stable', version must match x.y.z" ;;
|
||||
nightly|canary)
|
||||
[[ $VERSION =~ $PRE_REGEX ]] || fail "For '$VERSION_TYPE', version must match x.y.z-123456" ;;
|
||||
*)
|
||||
fail "Unknown version_type '$VERSION_TYPE'" ;;
|
||||
esac
|
||||
|
||||
# Extract major, minor from VERSION
|
||||
IFS=.- read -r major minor patch _ <<< "$VERSION"
|
||||
|
||||
# Determine NPM tag
|
||||
case "$VERSION_TYPE" in
|
||||
canary) TAG="canary" ;;
|
||||
nightly) TAG="nightly" ;;
|
||||
stable)
|
||||
# Use npm dist-tag "latest" as the reference
|
||||
LATEST="$(npm view --silent "$REFERENCE_PKG" dist-tags.latest 2>/dev/null || true)"
|
||||
echo "Latest for $REFERENCE_PKG is ${LATEST:-<none>}" >&2
|
||||
|
||||
if [[ -z ${LATEST:-} ]]; then
|
||||
TAG="latest" # first ever publish
|
||||
else
|
||||
case "$(semver_cmp "$VERSION" "$LATEST")" in
|
||||
gt) TAG="latest" ;; # newer than reference -> latest
|
||||
lt|eq) TAG="v${major}.${minor}-latest" ;; # older or equal -> vX.Y-latest
|
||||
esac
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "Resolved NPM_TAG=$TAG (VERSION=$VERSION, current latest=${LATEST:-none})" 1>&2 # stderr
|
||||
printf '%s' "$TAG"
|
||||
+14
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
if [[ -z "${GIT_COMMIT:-}" ]]; then
|
||||
echo "Error: Environment variable GIT_COMMIT is required"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if git merge-base --is-ancestor "$GIT_COMMIT" HEAD; then
|
||||
echo "Commit $GIT_COMMIT is contained in HEAD"
|
||||
else
|
||||
echo "Error: Commit $GIT_COMMIT is not contained in HEAD"
|
||||
exit 1
|
||||
fi
|
||||
@@ -1442,6 +1442,67 @@
|
||||
}
|
||||
],
|
||||
"type": "table"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "thresholds"
|
||||
},
|
||||
"custom": {
|
||||
"align": "auto",
|
||||
"cellOptions": {
|
||||
"type": "auto"
|
||||
},
|
||||
"footer": {
|
||||
"reducers": ["lastNotNull", "countAll"]
|
||||
},
|
||||
"inspect": false
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": 0
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 6,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 24
|
||||
},
|
||||
"id": 9,
|
||||
"options": {
|
||||
"cellHeight": "sm",
|
||||
"showHeader": true
|
||||
},
|
||||
"pluginVersion": "12.2.0-pre",
|
||||
"targets": [
|
||||
{
|
||||
"csvContent": "a,b\nfoo,bar\nbaz,bim\nbop,boop",
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"refId": "A",
|
||||
"scenarioId": "csv_content"
|
||||
}
|
||||
],
|
||||
"title": "No numeric fields",
|
||||
"type": "table"
|
||||
}
|
||||
],
|
||||
"preload": false,
|
||||
|
||||
+2
@@ -25,6 +25,8 @@ refs:
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/account-management/authentication-and-permissions/access-control/custom-role-actions-scopes/#grafana-adaptive-metrics-action-definitions
|
||||
cloud-access-policies-action-definitions:
|
||||
- pattern: /docs/grafana/
|
||||
destination: docs/grafana/<GRAFANA_VERSION>/administration/roles-and-permissions/access-control/custom-role-actions-scopes/#cloud-access-policies-action-definitions
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/administration/roles-and-permissions/access-control/custom-role-actions-scopes/#cloud-access-policies-action-definitions
|
||||
rbac-role-definitions:
|
||||
|
||||
@@ -298,16 +298,21 @@ groupByNode(summarize(movingAverage(apps.$app.$server.counters.requests.count, 5
|
||||
## Add ad hoc filters
|
||||
|
||||
_Ad hoc filters_ are one of the most complex and flexible variable options available.
|
||||
Instead of a regular list of variable options, this variable allows you to build a dashboard-wide ad hoc query.
|
||||
Instead of creating a variable for each dimension by which you want to filter, ad hoc filters automatically create variables (key/value pairs) for all the dimensions returned by your data source query.
|
||||
This allows you to apply filters dashboard-wide.
|
||||
|
||||
Ad hoc filters let you add label/value filters that are automatically added to all metric queries that use the specified data source.
|
||||
Unlike other variables, you don't use ad hoc filters in queries.
|
||||
Instead, you use ad hoc filters to write filters for existing queries.
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
Not all data sources support ad hoc filters.
|
||||
Examples of those that do include Prometheus, Loki, InfluxDB, and Elasticsearch.
|
||||
{{< /admonition >}}
|
||||
The following data sources support ad hoc filters:
|
||||
|
||||
- Dashboard - Use this special data source to [apply ad hoc filters to data from unsupported data sources](#filter-any-data-using-the-dashboard-data-source).
|
||||
- Prometheus
|
||||
- Loki
|
||||
- InfluxDB
|
||||
- Elasticsearch
|
||||
- OpenSearch
|
||||
|
||||
To create an ad hoc filter, follow these steps:
|
||||
|
||||
@@ -324,6 +329,60 @@ To create an ad hoc filter, follow these steps:
|
||||
|
||||
Now you can [filter data on the dashboard](ref:filter-dashboard).
|
||||
|
||||
### Filter any data using the Dashboard data source
|
||||
|
||||
In cases where a data source doesn't support the use of ad hoc filters, you can use the Dashboard data source to reference that data, and then filter it in a new panel.
|
||||
This allows you to bypass the limitations of the data source in the source panel.
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-dashboard-ds-v12.2.png" max-width="750px" alt="The query section of a panel with the Dashboard data source configured" >}}
|
||||
|
||||
To use ad hoc filters on data from an unsupported data source, follow these steps:
|
||||
|
||||
1. Navigate to the dashboard with the panel with the data you want to filter.
|
||||
1. Click **Edit** in top-right corner of the dashboard.
|
||||
1. At the top of the dashboard, click **Add** and select **Visualization** in the drop-down list.
|
||||
1. In the **Queries** tab of the edit panel view, enter `Dashboard` in the **Data source** field and select **-- Dashboard --**.
|
||||
1. In the query configuration section, make the following selections:
|
||||
- **Source panel** - Choose the panel with the source data.
|
||||
- **Data** - Select **All Data** to use the data of the panel, and not just the annotations. This is the default selection.
|
||||
- **AdHoc Filters** - Toggle on the switch to make the data from the referenced panel filterable.
|
||||
|
||||
{{< admonition type="note">}}
|
||||
If you're referencing multiple panels in a dashboard with the Dashboard data source, you can only use one of those source panels at a time for ad hoc filtering.
|
||||
{{< /admonition >}}
|
||||
|
||||
1. Configure any other needed options for the panel.
|
||||
1. Click **Save dashboard**.
|
||||
|
||||
Now you can filter the data from the source panel by way of the Dashboard data source.
|
||||
Add as many panels as you need.
|
||||
|
||||
### Dashboard drilldown with ad hoc filters
|
||||
|
||||
In table and bar chart visualizations, you can apply ad hoc filters directly from the visualization.
|
||||
To quickly apply ad hoc filter variables, follow these steps:
|
||||
|
||||
1. To display the filter icons, hover your cursor over the table cell with the value for which you want to filter. In this example, the cell value is `ConfigMap Updated`, which is in the `alertname` column:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-icon-v12.2.png" max-width="550px" alt="Table and bar chart with ad hoc filter icon displayed on a table cell" >}}
|
||||
|
||||
In bar chart visualizations, hover and click the bar to display the filter button:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-icon-bar-v12.2.png" max-width="300px" alt="The ad hoc filter button in a bar chart tooltip">}}
|
||||
|
||||
1. Click the add filter icon.
|
||||
|
||||
The variable pair `alertname = ConfigMap Updated` is added to the ad hoc filter and all panels using the same data source that include that variable value are filtered by that value:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-applied-v12.2.png" max-width="550px" alt="Table and bar chart, filtered" >}}
|
||||
|
||||
If one of the panels in the dashboard using that data source doesn't include that variable value, the panel won't return any data. In this example, the variable pair `_name_ = ALERTS` has been added to the ad hoc filter so the bar chart doesn't return any results:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-no-data-v12.2.png" max-width="650px" alt="Table, filtered and bar chart returning no results" >}}
|
||||
|
||||
In cases where the data source you're using doesn't support ad hoc filtering, consider using the special Dashboard data source.
|
||||
For more information, refer to [Filter any data using the Dashboard data source](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/dashboards/variables/add-template-variables/#filter-any-data-using-the-dashboard-data-source).
|
||||
|
||||
<!-- vale Grafana.Spelling = YES -->
|
||||
<!-- vale Grafana.WordList = YES -->
|
||||
|
||||
|
||||
@@ -37,7 +37,7 @@ Use these steps to migrate resources between environments:
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
Resources are pulled and pushed from the `./resources` directory by default.
|
||||
This directory can be configured with the `--directory`/`-d` flags.
|
||||
This can be configured with the `-p, --path` flags to specify custom paths on disk.
|
||||
{{< /admonition >}}
|
||||
|
||||
1. Make changes to dashboards and other resources using the Grafana UI in your **development instance**.
|
||||
@@ -45,21 +45,21 @@ This directory can be configured with the `--directory`/`-d` flags.
|
||||
|
||||
```bash
|
||||
grafanactl config use-context YOUR_CONTEXT # for example "dev"
|
||||
grafanactl resources pull -d ./resources/ -o yaml # or json
|
||||
grafanactl resources pull --path ./resources/ -o yaml # or json
|
||||
```
|
||||
|
||||
1. (Optional) Preview the resources locally before pushing:
|
||||
|
||||
```bash
|
||||
grafanactl config use-context YOUR_CONTEXT # for example "prod"
|
||||
grafanactl resources serve -d ./resources/
|
||||
grafanactl resources serve ./resources/
|
||||
```
|
||||
|
||||
1. Switch to the **production instance** and push the resources:
|
||||
|
||||
```bash
|
||||
grafanactl config use-context YOUR_CONTEXT # for example "prod"
|
||||
grafanactl resources push -d ./resources/
|
||||
grafanactl resources push -p ./resources/
|
||||
```
|
||||
|
||||
## Back up Grafana resources
|
||||
@@ -70,7 +70,7 @@ This workflow helps you back up all Grafana resources from one instance and late
|
||||
|
||||
```bash
|
||||
grafanactl config use-context YOUR_CONTEXT # for example "prod"
|
||||
grafanactl resources pull -d ./resources/ -o yaml # or json
|
||||
grafanactl resources pull --path ./resources/ -o yaml # or json
|
||||
```
|
||||
|
||||
1. Save the exported resources to version control or cloud storage.
|
||||
@@ -81,14 +81,14 @@ This workflow helps you back up all Grafana resources from one instance and late
|
||||
|
||||
```bash
|
||||
grafanactl config use-context YOUR_CONTEXT # for example "prod"
|
||||
grafanactl resources serve -d ./resources/
|
||||
grafanactl resources serve ./resources/
|
||||
```
|
||||
|
||||
1. To restore the resources later or restore them on another instance, push the saved resources:
|
||||
|
||||
```bash
|
||||
grafanactl config use-context YOUR_CONTEXT # for example "prod"
|
||||
grafanactl resources push -d ./resources/
|
||||
grafanactl resources push -p ./resources/
|
||||
```
|
||||
|
||||
## Manage dashboards as code
|
||||
@@ -114,7 +114,7 @@ With this workflow, you can define and manage dashboards as code, saving them to
|
||||
|
||||
```bash
|
||||
grafanactl config use-context YOUR_CONTEXT # for example "dev"
|
||||
grafanactl resources push -d ./resources/
|
||||
grafanactl resources push -p ./resources/
|
||||
```
|
||||
|
||||
## Explore and modify resources from the terminal
|
||||
@@ -197,7 +197,7 @@ Use this workflow to locate dashboards using a deprecated API version and mark t
|
||||
playlist.grafana.app v1 playlist
|
||||
```
|
||||
|
||||
1. Find dashboards that are still using an old API version:
|
||||
1. Find dashboards that are still using a deprecated API version:
|
||||
|
||||
```bash
|
||||
grafanactl resources get dashboards.v1.dashboard.grafana.app
|
||||
|
||||
@@ -23,52 +23,40 @@ Provisioning is an [experimental feature](https://grafana.com/docs/release-life-
|
||||
Sign up for Grafana Cloud Git Sync early access using [this form](https://forms.gle/WKkR3EVMcbqsNnkD9).
|
||||
{{< /admonition >}}
|
||||
|
||||
Using Provisioning, you can configure how to store your dashboard JSON files in either GitHub repositories using Git Sync or a local path.
|
||||
Provisioning is an experimental feature that allows you to configure how to store your dashboard JSONs and other files in GitHub repositories using either Git Sync or a local path.
|
||||
|
||||
Of the two experimental options, Git Sync is the recommended method for provisioning your dashboards. You can synchronize any new dashboards and changes to existing dashboards to your configured GitHub repository.
|
||||
If you push a change in the repository, those changes are mirrored in your Grafana instance.
|
||||
For more information on configuring Git Sync, refer to [Set up Git Sync](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/provision-resources/git-sync-setup).
|
||||
Of the two options, **Git Sync** is the favorited method for provisioning your dashboards. You can synchronize any new dashboards and changes to existing dashboards from the UI to your configured GitHub repository. If you push a change in the repository, those changes are mirrored in your Grafana instance. See [Git Sync workflow](#git-sync-workflow).
|
||||
|
||||
Refer to [Set up file provisioning](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/provision-resources/file-path-setup/) to learn more about the version of local file provisioning in Grafana 12.
|
||||
Alternatively, **local file provisioning** allows you to include in your Grafana instance resources (such as folders and dashboard JSON files) that are stored in a local file system. See [Local file workflow](local-file-workflow).
|
||||
|
||||
## Provisioned folders and connections
|
||||
|
||||
Dashboards and folders saved to the local path are referred to as "provisioned" resources and are labeled as such in the Grafana UI.
|
||||
|
||||
Dashboards saved in your GitHub repository or local folder configured appear in a provisioned folder in Grafana.
|
||||
The dashboards saved in your GitHub repository or local folder appear in Grafana in the 'provisioned' folder. The dashboards and folders saved to the local path are referred to as 'provisioned' resources and are labeled as such in the Grafana UI. You can update any provisioned dashboard that is either stored within a GitHub repository (Git Sync workflow) or in a local file (local file workflow).
|
||||
|
||||
You can set a single folder, or multiple folders to a different repository, with up to 10 connections. Alternatively, your entire Grafana instance can be the provisioned folder.
|
||||
|
||||
## How it works
|
||||
## Git Sync workflow
|
||||
|
||||
A user decides to update a provisioned dashboard that is either stored within a GitHub repository (Git Sync workflow) or in a local file (local file workflow).
|
||||
In the Git Sync workflow:
|
||||
|
||||
### Git Sync workflow
|
||||
- When you provision resources with Git Sync you can modify them from within the Grafana UI or within the GitHub repository. Changes made in either the repository or the Grafana UI are bidirectional.
|
||||
- Any changes made in the provisioned files stored in the GitHub repository are reflected in the Grafana database. By default, Grafana polls GitHub every 60 seconds.
|
||||
- The Grafana UI reads from the database and updates the UI to reflect these changes.
|
||||
|
||||
Resources provisioned with Git Sync can be modified from within the Grafana UI or within the GitHub repository.
|
||||
Changes made in either the repository or the Grafana UI are bidirectional.
|
||||
For example, if you update a dashboard within the Grafana UI and click **Save** to preserve the changes, you'll be notified that the dashboard is provisioned in a GitHub repository. Next you'll be prompted to choose how to preserve the changes: either directly to a branch, or pushed to a new branch using a pull request in GitHub.
|
||||
|
||||
For example, when a user updates dashboards within the Grafana UI, they choose **Save** to preserve the changes.
|
||||
Grafana notifies them that the dashboard is provisioned in a GitHub repository.
|
||||
They choose how to preserve their changes: either saved directly to a branch or pushed to a new branch using a pull request in GitHub.
|
||||
If they chose a new branch, then they open the pull request and follow their normal workflow.
|
||||
For more information, see [Introduction to Git Sync](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/provision-resources/intro-git-sync) and [Set up Git Sync](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/provision-resources/git-sync-setup).
|
||||
|
||||
Grafana polls GitHub at a regular interval.
|
||||
The connection is established using a personal access token for authorization.
|
||||
With the webhooks feature enabled, repository notifications appear almost immediately.
|
||||
Without webhooks, Grafana polls for changes at the specified interval.
|
||||
The default polling interval is 60 seconds.
|
||||
## Local file workflow
|
||||
|
||||
Any changes made in the provisioned files stored in the GitHub repository are reflected in the Grafana database.
|
||||
The Grafana UI reads the database and updates the UI to reflect these changes.
|
||||
In the local file workflow:
|
||||
|
||||
### Local file workflow
|
||||
- All provisioned resources are changed in the local files.
|
||||
- Any changes made in the provisioned files are reflected in the Grafana database.
|
||||
- The Grafana UI reads the database and updates the UI to reflect these changes.
|
||||
- You can't use the Grafana UI to edit or delete provisioned resources.
|
||||
|
||||
In the local file workflow, all provisioned resources are changed in the local files.
|
||||
The user can't use the Grafana UI to edit or delete provisioned resources.
|
||||
|
||||
Any changes made in the provisioned files are reflected in the Grafana database.
|
||||
The Grafana UI reads the database and updates the UI to reflect these changes.
|
||||
Learn more in [Set up file provisioning](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/provision-resources/file-path-setup/).
|
||||
|
||||
## Explore provisioning
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ Sign up for Grafana Cloud Git Sync early access using [this form](https://forms.
|
||||
|
||||
{{< /admonition >}}
|
||||
|
||||
File provisioning in Grafana lets you include resources, including folders and dashboard JSON files, that are stored in a local file system.
|
||||
Use local file provisioning to include in your Grafana instance resources (such as folders and dashboard JSON files) that are stored in a local file system.
|
||||
|
||||
This page explains how to set up local file provisioning.
|
||||
|
||||
@@ -48,7 +48,7 @@ Refer to [Provision Grafana](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/
|
||||
### Limitations
|
||||
|
||||
- A provisioned dashboard can't be deleted from within Grafana UI. The dashboard has to be deleted at the local file system and those changes synced to Grafana.
|
||||
- Changes from the local file system are one way: you can't save changes from
|
||||
- Changes from the local file system are one way: you can't save changes from the UI to GitHub.
|
||||
|
||||
## Before you begin
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ title: Git Sync
|
||||
weight: 100
|
||||
---
|
||||
|
||||
# Git Sync
|
||||
# Introduction to Git Sync
|
||||
|
||||
{{< admonition type="caution" >}}
|
||||
Git Sync is an [experimental feature](https://grafana.com/docs/release-life-cycle/) introduced in Grafana v12 for open source and Enterprise editions. Engineering and on-call support is not available. Documentation is either limited or not provided outside of code comments. No SLA is provided. Enable the `provisioning` and `kubernetesDashboards` feature toggles in Grafana to use this feature. This feature is not publicly available in Grafana Cloud yet. Only the cloud-hosted version of GitHub (GitHub.com) is supported at this time. GitHub Enterprise is not yet compatible.
|
||||
|
||||
@@ -12,7 +12,7 @@ labels:
|
||||
- enterprise
|
||||
- oss
|
||||
title: Manage provisioned repositories with Git Sync
|
||||
menuTitle: Manage repositories
|
||||
menuTitle: Manage repositories with Git Sync
|
||||
weight: 400
|
||||
---
|
||||
|
||||
@@ -25,8 +25,7 @@ Sign up for Grafana Cloud Git Sync early access using [this form](https://forms.
|
||||
|
||||
{{< /admonition >}}
|
||||
|
||||
After you have set up Git Sync, you can synchronize dashboards and changes to existing dashboards to your configured GitHub repository.
|
||||
If you push a change in the repository, those changes are mirrored in your Grafana instance.
|
||||
After you have set up Git Sync, you can synchronize any changes in your existing dashboards with your configured GitHub repository. Similarly, if you push a change in the repository, those changes are mirrored in your Grafana instance.
|
||||
|
||||
## View current status of synchronization
|
||||
|
||||
|
||||
@@ -278,6 +278,17 @@ When linking to another dashboard that uses template variables, select variable
|
||||
|
||||
If you want to add all of the current dashboard's variables to the URL, then use `${__all_variables}`.
|
||||
|
||||
When you link to another dashboard, ensure that:
|
||||
|
||||
- The target dashboard has the same variable name. If it doesn't (for example, `server` in the source dashboard and `host` in the target), you must align them or explicitly map values (for example, `&var-host=${server}`).
|
||||
- You use the variable _name_, and not the label. Labels are only used as display text and aren't recognized in URLs.
|
||||
|
||||
For example, if you have a variable with the name `var-server` and the label `ChooseYourServer`, you must use `var-server` in the URL, as shown in the following table:
|
||||
|
||||
| Correct link | Incorrect link |
|
||||
| ---------------------------------------------- | -------------------------------------------------------- |
|
||||
| `/d/xxxx/dashboard-b?orgId=1&var-server=web02` | `/d/xxxx/dashboard-b?orgId=1&var-ChooseYourServer=web02` |
|
||||
|
||||
## Add data links or actions {#add-a-data-link}
|
||||
|
||||
The following tasks describe how to configure data links and actions.
|
||||
@@ -296,9 +307,7 @@ To add a data link, follow these steps:
|
||||
This is a human-readable label for the link displayed in the UI. This is a required field.
|
||||
|
||||
1. Enter the **URL** to which you want to link.
|
||||
|
||||
To add a data link variable, click in the **URL** field and enter `$` or press Ctrl+Space or Cmd+Space to see a list of available variables. This is a required field.
|
||||
|
||||
1. (Optional) To add a data link variable, click in the **URL** field and enter `$` or press Ctrl+Space or Cmd+Space to see a list of available variables.
|
||||
1. If you want the link to open in a new tab, toggle the **Open in a new tab** switch.
|
||||
1. If you want the data link to open with a single click on the visualization, toggle the **One click** switch.
|
||||
|
||||
|
||||
@@ -121,7 +121,7 @@ query_result(max_over_time(<metric>[${__range_s}s]) != <state>)
|
||||
{{< admonition type="note" >}}
|
||||
Saved queries is currently in [public preview](https://grafana.com/docs/release-life-cycle/). Grafana Labs offers limited support, and breaking changes might occur prior to the feature being made generally available.
|
||||
|
||||
This feature is only available on Grafana Enterprise and Grafana Cloud.
|
||||
This feature is only available on Grafana Enterprise and Grafana Cloud. It will gradually roll out to all Grafana Cloud users with no action required. To try out this feature on Grafana Enterprise, enable the `queryLibrary` feature toggle.
|
||||
{{< /admonition >}}
|
||||
|
||||
You can save queries that you've created so they can be reused by you and others in your organization.
|
||||
|
||||
@@ -19,7 +19,7 @@ refs:
|
||||
|
||||
# SQL expressions
|
||||
|
||||
{{< docs/private-preview product="SQL expressions" >}}
|
||||
{{< docs/public-preview product="SQL expressions" >}}
|
||||
|
||||
SQL Expressions are server-side expressions that manipulate and transform the results of data source queries using MySQL-like syntax. They allow you to easily query and transform your data after it has been queried, using SQL, which provides a familiar and powerful syntax that can handle everything from simple filters to highly complex, multi-step transformations.
|
||||
|
||||
@@ -60,11 +60,17 @@ A key capability of SQL expressions is the ability to JOIN data from multiple ta
|
||||
|
||||
To work with SQL expressions, you must use data from a backend data source. In Grafana, a backend data source refers to a data source plugin or integration that communicates with a database, service, or API through the Grafana server, rather than directly from the browser (frontend).
|
||||
|
||||
## Known limitations
|
||||
|
||||
- Currently, only one SQL expression is supported per panel or alert.
|
||||
- Grafana supports certain data sources. Refer to [compatible data sources](#compatible-data-sources) for a current list.
|
||||
- Autocomplete is available, but column/field autocomplete is only available after enabling the `sqlExpressionsColumnAutoComplete` feature toggle, which is provided on an experimental basis.
|
||||
|
||||
## Compatible data sources
|
||||
|
||||
The following are compatible data sources:
|
||||
|
||||
**Full support:** All query types for each data source are supported.
|
||||
**Full support:** Grafana supports all query types for each of these data sources.
|
||||
|
||||
- Elasticsearch
|
||||
- MySQL
|
||||
@@ -73,7 +79,7 @@ The following are compatible data sources:
|
||||
- Google Sheets
|
||||
- Amazon Athena
|
||||
|
||||
**Partial support:** The following data sources offer limited or conditional support. Some allow different types of queries, depending on the service being accessed. For example, Azure Monitor can query multiple services, each with its own query format. In some cases, you can also change the query type within a panel.
|
||||
**Partial support:** The following data sources have limited or conditional support. Some support multiple query types depending on the service. For example, Azure Monitor can query multiple services, each with its own query format. In some cases, you can also switch the query type within a panel.
|
||||
|
||||
- InfluxDB
|
||||
- Infinity
|
||||
@@ -97,6 +103,10 @@ To create a SQL expression, complete the following steps:
|
||||
|
||||
After you have added a SQL expression, you can select from other data source queries by referencing the RefIDs of the queries in your SQL expression as if they were tables in a SQL database.
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
The **RefID** is a unique identifier assigned to each query within a Grafana panel that serves as a reference name for that query's data.
|
||||
{{< /admonition >}}
|
||||
|
||||

|
||||
|
||||
## Workflow to build SQL expressions
|
||||
@@ -134,22 +144,65 @@ The SQL expression workflow in Grafana is designed with the following behaviors:
|
||||
|
||||
- **Non-tabular or incorrectly shaped data will not render in certain panels.** Visualizations such as graphs or gauges require properly structured data. Mismatched formats will result in rendering issues or missing data.
|
||||
|
||||
For data to be used in SQL expressions, it must be in a **tabular format**, specifically the **FullLong format**. This means all relevant data is contained within a single table, with values such as metric labels stored as columns and individual cells. Because not all data sources return results in this format by default, Grafana will automatically convert compatible query results to FullLong format when they are referenced in a SQL expression.
|
||||
|
||||
## SQL conversion rules
|
||||
|
||||
When a RefID is referenced within a SQL statement (e.g., `SELECT * FROM A`), the system invokes a distinct SQL conversion process.
|
||||
When you reference a RefID within a SQL statement (e.g., `SELECT * FROM A`), the system invokes a distinct SQL conversion process.
|
||||
|
||||
The SQL conversion path:
|
||||
|
||||
- The query result is treated as a single data frame, without labels, and is mapped directly to a tabular format.
|
||||
- If the frame type is present and is either numeric, wide time series, or multi-frame time series (for example, labeled formats), Grafana automatically converts the data into a table structure.
|
||||
- The query result appears as a single data frame, without labels, and is mapped directly to a tabular format.
|
||||
- If the frame type is present and is either numeric, wide time series, or multi-frame time series (for example: labeled formats), Grafana automatically converts the data into a table structure.
|
||||
|
||||
## Known limitations
|
||||
## Supported functions
|
||||
|
||||
- Currently, only one SQL expression is supported per panel or alert.
|
||||
- Grafana supports certain data sources. Refer to [compatible data sources](#compatible-data-sources) for a current list.
|
||||
- Autocomplete is available, but column/field autocomplete is only available after enabling the `sqlExpressionsColumnAutoComplete` feature toggle, which is provided on an experimental basis.
|
||||
Grafana maintains a complete list of supported SQL keywords, operators, and functions in the SQL expressions query validator implementation.
|
||||
|
||||
For the most up-to-date reference of all supported SQL functionality, refer to the `allowedNode` and `allowedFunction` definitions in the Grafana [codebase](https://github.com/grafana/grafana/blob/main/pkg/expr/sql/parser_allow.go).
|
||||
|
||||
## Alerting and recording rules
|
||||
|
||||
SQL expressions integrates alerting and recording rules, allowing you to define complex conditions and metrics using standard SQL queries. The system processes your query results and automatically creates alert instances or recorded metrics based on the returned data structure.
|
||||
|
||||
For SQL Expressions to work properly with alerting and recording rules, your query must return:
|
||||
|
||||
- One numeric column - **_required_**. This contains the value that triggers alerts or gets recorded.
|
||||
- Unique string column combinations - **_required_**. Each row must have a unique combination of string column values.
|
||||
- One or more string columns - _optional_. These become **labels** for the alert instances or metrics. Examples: `service`, `region`.
|
||||
|
||||
Consider the following query results:
|
||||
|
||||
```sql
|
||||
error_count,service,region
|
||||
25,auth-service,us-east
|
||||
0,payment-service,us-west
|
||||
15,user-service,eu-west
|
||||
```
|
||||
|
||||
This query returns:
|
||||
|
||||
- the numeric column `error_count` (values: 25, 0, 15)
|
||||
- the string columns `service` and `region`
|
||||
|
||||
For alert rules, this creates three alert instances:
|
||||
|
||||
- First instance with labels {service=auth-service, region=us-east} and value 25 (triggers alert - high error count)
|
||||
- Second instance with labels {service=payment-service, region=us-west} and value 0 (no alert - zero errors)
|
||||
- Third instance with labels {service=user-service, region=eu-west} and value 15 (triggers alert - elevated error count)
|
||||
|
||||
For recording rules, this creates one metric with three series:
|
||||
|
||||
- First series: error_count_total{service=auth-service, region=us-east} 25
|
||||
- Second series: error_count_total{service=payment-service, region=us-west} 0
|
||||
- Third series: error_count_total{service=user-service, region=eu-west} 15
|
||||
|
||||
Following are some best practices for alerting and recording rules:
|
||||
|
||||
- Keep numeric values meaningful (for example: error counts, request duration).
|
||||
- Use clear, descriptive column names - these become your labels.
|
||||
- Keep string values short and consistent.
|
||||
- Avoid too many unique label combinations, as this can result in high cardinality.
|
||||
- Always use `GROUP BY` to avoid duplicate label errors.
|
||||
- Aggregate numeric values logically (for example: `SUM(error_count)`).
|
||||
|
||||
## Supported data source formats
|
||||
|
||||
@@ -202,3 +255,19 @@ During conversion:
|
||||
2. Add the SQL expression `SELECT * from A`. After you add a SQL expression that selects from RefID A, Grafana converts it to a table response:
|
||||
|
||||

|
||||
|
||||
## LLM integration
|
||||
|
||||
The Grafana LLM plugin seamlessly integrates AI-powered assistance into your SQL expressions workflow.
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
The Grafana LLM plugin is currently in public preview, meaning Grafana offers limited support, and breaking changes might occur prior to the feature being made generally available.
|
||||
{{< /admonition >}}
|
||||
|
||||
To use this integration, first [install and configure the LLM plugin](https://grafana.com/grafana/plugins/grafana-llm-app/). After installation, open your dashboard and select **Edit** to open the panel editor. Navigate to the **Queries** tab and scroll to the bottom where you'll find two new buttons positioned to the right of the **Run query** button in your SQL Expressions query.
|
||||
|
||||
{{< figure src="/media/docs/sql-expressions/sqlexpressions-LLM-integration-v12.2.png" caption="LLM integration" >}}
|
||||
|
||||
Click **Explain query** to open a drawer that displays a detailed explanation of your query, including its interpreted business meaning and performance statistics. Once the explanation is generated, the button changes to **View explanation**.
|
||||
|
||||
Click **Improve query** to open a suggestions drawer that contains performance and reliability enhancements, column naming best practices, and guidance on panel optimization. Click **Apply** to implement a suggestion. After you’ve interacted with the interface, you'll see a **Suggestions** button for quick access. Newer suggestions appear at the top, with older ones listed below, creating a history of improvements. If your SQL query has a parsing error, such as a syntax issue, the LLM will attempt to provide a corrected version. The LLM automatically identifies errors and helps you rewrite the query correctly.
|
||||
|
||||
@@ -88,6 +88,22 @@ While the first field can be time-based and you can use a bar chart to plot time
|
||||
|
||||
We recommend that you only use one dataset in a bar chart because using multiple datasets can result in unexpected behavior.
|
||||
|
||||
<!-- vale Grafana.WordList = NO -->
|
||||
<!-- vale Grafana.Spelling = NO -->
|
||||
|
||||
## Apply ad hoc filters from the bar chart
|
||||
|
||||
In bar charts, you can apply ad hoc filters directly from the visualization.
|
||||
|
||||
To display the filter button, hover your cursor over the bar that has the value for which you want to filter and click the bar:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-icon-bar-v12.2.png" max-width="300px" alt="The ad hoc filter button in a bar chart tooltip">}}
|
||||
|
||||
For more information about applying ad hoc filters this way, refer to [Dashboard drilldown with ad hoc filters](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/dashboards/variables/add-template-variables/#dashboard-drilldown-with-ad-hoc-filters).
|
||||
|
||||
<!-- vale Grafana.Spelling = YES -->
|
||||
<!-- vale Grafana.WordList = YES -->
|
||||
|
||||
## Configuration options
|
||||
|
||||
{{< docs/shared lookup="visualizations/config-options-intro.md" source="grafana" version="<GRAFANA_VERSION>" >}}
|
||||
|
||||
@@ -238,6 +238,6 @@ Optional fields:
|
||||
| arc\_\_\* | number | Any field prefixed with `arc__` will be used to create the color circle around the node. All values in these fields should add up to 1. You can specify color using `config.color.fixedColor`. |
|
||||
| detail\_\_\* | string/number | Any field prefixed with `detail__` will be shown in the header of context menu when clicked on the node. Use `config.displayName` for more human readable label. |
|
||||
| color | string/number | Can be used to specify a single color instead of using the `arc__` fields to specify color sections. It can be either a string which should then be an acceptable HTML color string or it can be a number in which case the behavior depends on `field.config.color.mode` setting. This can be for example used to create gradient colors controlled by the field value. |
|
||||
| icon | string | Name of the icon to show inside the node instead of the default stats. Only Grafana [built in icons](https://developers.grafana.com/ui/latest/index.html?path=/story/docs-overview-icon--icons-overview)) are allowed. |
|
||||
| icon | string | Name of the icon to show inside the node instead of the default stats. Only Grafana [built in icons](https://developers.grafana.com/ui/latest/index.html?path=/story/iconography-icon--icons-overview) are allowed. |
|
||||
| nodeRadius | number | Radius value in pixels. Used to manage node size. |
|
||||
| highlighted | boolean | Sets whether the node should be highlighted. Useful for example to represent a specific path in the graph by highlighting several nodes and edges. Default: `false` |
|
||||
|
||||
@@ -174,6 +174,22 @@ Columns with filters applied have a blue filter displayed next to the title.
|
||||
|
||||
To remove the filter, click the blue filter icon and then click **Clear filter**.
|
||||
|
||||
<!-- vale Grafana.WordList = NO -->
|
||||
<!-- vale Grafana.Spelling = NO -->
|
||||
|
||||
### Apply ad hoc filters from the table
|
||||
|
||||
In tables, you can apply ad hoc filters directly from the visualization with one click.
|
||||
|
||||
To display the filter icons, hover your cursor over the cell that has the value for which you want to filter:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-table-adhoc-filter-v12.2.png" max-width="500px" alt="Table with ad hoc filter icon displayed on a cell" >}}
|
||||
|
||||
For more information about applying ad hoc filters this way, refer to [Dashboard drilldown with ad hoc filters](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/dashboards/variables/add-template-variables/#dashboard-drilldown-with-ad-hoc-filters).
|
||||
|
||||
<!-- vale Grafana.Spelling = YES -->
|
||||
<!-- vale Grafana.WordList = YES -->
|
||||
|
||||
## Sort columns
|
||||
|
||||
Click a column title to change the sort order from default to descending to ascending.
|
||||
@@ -408,7 +424,7 @@ However, you can switch back and forth between tabs.
|
||||
|
||||
The **Pill** cell type displays each item in a comma-separated string in a colored block.
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-table-pills-v12.1.png" max-width="750px" alt="Table using the pill cell type" >}}
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-table-pill-cells-v12.2.png" max-width="750px" alt="Table using the pill cell type" >}}
|
||||
|
||||
The colors applied to each piece of text are maintained throughout the table.
|
||||
For example, if the word "test" is first displayed in a red pill, it will always be displayed in a red pill.
|
||||
@@ -439,6 +455,8 @@ in these cells if the [`disable_sanitize_html`](https://grafana.com/docs/grafana
|
||||
Toggle on the **Tooltip from field** switch to use the values from another field (or column) in a tooltip.
|
||||
For more information, refer to [Tooltip from field](#tooltip-from-field).
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-table-markdown-v12.2.png" max-width="600px" alt="Table using the markdown cell type" >}}
|
||||
|
||||
#### Image
|
||||
|
||||
If you have a field value that is an image URL or a base64 encoded image, this cell type displays it as an image.
|
||||
|
||||
+1
-1
@@ -33,7 +33,7 @@ Grafana provides OAuth2 integrations for the following auth providers:
|
||||
|
||||
If your OAuth2 provider is not listed, you can use Generic OAuth authentication.
|
||||
|
||||
This topic describes how to configure Generic OAuth authentication using different methods and includes [examples of setting up Generic OAuth](#examples-of-setting-up-generic-oauth2) with specific OAuth2 providers.
|
||||
This topic describes how to configure Generic OAuth authentication using different methods and includes [examples of setting up Generic OAuth](#examples-of-setting-up-generic-oauth) with specific OAuth2 providers.
|
||||
|
||||
## Before you begin
|
||||
|
||||
|
||||
+2
-2
@@ -29,7 +29,7 @@ SAML authentication integration allows your Grafana users to log in by using an
|
||||
|
||||
You can configure SAML authentication in Grafana through one of the following methods:
|
||||
|
||||
- [Configure SAML using Grafana configuration file](#configure-saml-using-the-grafana-config-file)
|
||||
- [Configure SAML using the Grafana configuration file](#configure-saml-using-the-grafana-configuration-file)
|
||||
- Configure SAML using the [SSO Settings API](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/developers/http_api/sso-settings/)
|
||||
- Configure SAML using the [SAML user interface](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/setup-grafana/configure-security/configure-authentication/saml/saml-ui/)
|
||||
- Configure SAML using the [Grafana Terraform provider](https://registry.terraform.io/providers/grafana/grafana/<GRAFANA_VERSION>/docs/resources/sso_settings)
|
||||
@@ -237,7 +237,7 @@ To allow Grafana to initiate a POST request to the IdP, update the `content_secu
|
||||
For Grafana Cloud instances, please contact Grafana Support to update the `content_security_policy_template` and `content_security_policy_report_only_template` settings of your Grafana instance. Please provide the metadata URL/file of your IdP.
|
||||
{{< /admonition >}}
|
||||
|
||||
## IdP-initiated login
|
||||
## IdP-initiated Single Sign-On (SSO)
|
||||
|
||||
By default, Grafana allows only service provider (SP) initiated logins (when the user logs in with SAML via the login page in Grafana). If you want users to log in to Grafana directly from your identity provider (IdP), set the `allow_idp_initiated` configuration option to `true` and configure `relay_state` with the same value specified in the IdP configuration.
|
||||
|
||||
|
||||
+15
-15
@@ -22,29 +22,17 @@ System for Cross-domain Identity Management (SCIM) is an open standard that allo
|
||||
{{< admonition type="note" >}}
|
||||
Available in [Grafana Enterprise](/docs/grafana/<GRAFANA_VERSION>/introduction/grafana-enterprise/) and select Grafana Cloud plans in [public preview](https://grafana.com/docs/release-life-cycle/).
|
||||
Grafana Labs offers limited support, and breaking changes might occur prior to the feature being made generally available.
|
||||
{{< /admonition >}}
|
||||
|
||||
{{< admonition type="warning" >}}
|
||||
**Public Preview:** SCIM provisioning is currently in Public Preview. While functional, the feature is actively being refined and may undergo changes. We recommend thorough testing in non-production environments before deploying to production systems.
|
||||
{{< /admonition >}}
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
This feature is behind the `enableSCIM` feature toggle.
|
||||
You can enable feature toggles through the configuration file or environment variables.
|
||||
|
||||
For more information, refer to the [feature toggles documentation](/docs/grafana/<GRAFANA_VERSION>/setup-grafana/configure-grafana/#feature_toggles).
|
||||
|
||||
{{< /admonition >}}
|
||||
|
||||
{{< admonition type="warning" title="Critical: Aligning SAML Identifier with SCIM externalId" >}}
|
||||
When using SAML for authentication alongside SCIM provisioning, a critical security measure is to ensure proper alignment between the SCIM user's `externalId` and the SAML user identifier. The unique identifier used for SCIM provisioning (which becomes the `externalId` in Grafana, often sourced from a stable IdP attribute like Azure AD's `user.objectid`) **must also be sent as a claim in the SAML assertion from your Identity Provider.**
|
||||
Furthermore, the Grafana SAML configuration must be correctly set up to identify and use this specific claim for linking the authenticated SAML user to their SCIM-provisioned user. You can achieve this by using the `assertion_attribute_external_uid` setting in Grafana to explicitly set the name of the SAML claim that contains the stable unique identifier attribute.
|
||||
{{< admonition type="warning" >}}
|
||||
|
||||
**Why is this important?**
|
||||
A mismatch or inconsistent mapping between this SAML login identifier and the SCIM `externalId` creates a critical security vulnerability. If these two identifiers are not reliably and uniquely aligned for each individual user, Grafana may fail to correctly link an authenticated SAML session to the intended SCIM-provisioned user profile and its associated permissions. This can enable a malicious actor to impersonate another user—for instance, by crafting a SAML assertion that, due to the identifier misalignment, incorrectly grants them the access rights of the targeted user.
|
||||
|
||||
Grafana relies on this linkage to correctly associate the authenticated user from SAML with the provisioned user from SCIM. Failure to ensure a consistent and unique identifier across both systems can break this linkage, leading to incorrect user mapping and potential unauthorized access.
|
||||
|
||||
Always verify that your SAML identity provider is configured to send a stable, unique user identifier that your SCIM configuration maps to `externalId`. Refer to your identity provider's documentation and the specific Grafana SCIM integration guides (e.g., for [Azure AD](configure-scim-with-azuread/) or [Okta](configure-scim-with-okta/)) for detailed instructions on configuring these attributes correctly.
|
||||
**Public Preview:** SCIM provisioning is currently in Public Preview. While functional, the feature is actively being refined and may undergo changes. We recommend thorough testing in non-production environments before deploying to production systems.
|
||||
{{< /admonition >}}
|
||||
|
||||
## Benefits
|
||||
@@ -63,6 +51,18 @@ SCIM offers several advantages for managing users and teams in Grafana:
|
||||
|
||||
## Authentication and access requirements
|
||||
|
||||
{{< admonition type="warning" title="Critical: Aligning SAML Identifier with SCIM externalId" >}}
|
||||
When using SAML for authentication alongside SCIM provisioning, a critical security measure is to ensure proper alignment between the SCIM user's `externalId` and the SAML user identifier. The unique identifier used for SCIM provisioning (which becomes the `externalId` in Grafana, often sourced from a stable IdP attribute like Azure AD's `user.objectid`) **must also be sent as a claim in the SAML assertion from your Identity Provider.**
|
||||
Furthermore, the Grafana SAML configuration must be correctly set up to identify and use this specific claim for linking the authenticated SAML user to their SCIM-provisioned user. You can achieve this by using the `assertion_attribute_external_uid` setting in Grafana to explicitly set the name of the SAML claim that contains the stable unique identifier attribute.
|
||||
|
||||
**Why is this important?**
|
||||
A mismatch or inconsistent mapping between this SAML login identifier and the SCIM `externalId` creates a critical security vulnerability. If these two identifiers are not reliably and uniquely aligned for each individual user, Grafana may fail to correctly link an authenticated SAML session to the intended SCIM-provisioned user profile and its associated permissions. This can enable a malicious actor to impersonate another user—for instance, by crafting a SAML assertion that, due to the identifier misalignment, incorrectly grants them the access rights of the targeted user.
|
||||
|
||||
Grafana relies on this linkage to correctly associate the authenticated user from SAML with the provisioned user from SCIM. Failure to ensure a consistent and unique identifier across both systems can break this linkage, leading to incorrect user mapping and potential unauthorized access.
|
||||
|
||||
Always verify that your SAML identity provider is configured to send a stable, unique user identifier that your SCIM configuration maps to `externalId`. Refer to your identity provider's documentation and the specific Grafana SCIM integration guides (e.g., for [Azure AD](configure-scim-with-azuread/) or [Okta](configure-scim-with-okta/)) for detailed instructions on configuring these attributes correctly.
|
||||
{{< /admonition >}}
|
||||
|
||||
When you enable SCIM in Grafana, the following requirements and restrictions apply:
|
||||
|
||||
1. **Use the same identity provider for user provisioning and for authentication flow**: You must use the same identity provider for both authentication and user provisioning.
|
||||
|
||||
@@ -32,6 +32,12 @@ Alert notifications can include images, but rendering many images at the same ti
|
||||
|
||||
## Install Grafana Image Renderer plugin
|
||||
|
||||
{{< admonition type="caution" >}}
|
||||
Starting with Grafana v12.2, the Grafana Image Renderer plugin is deprecated and is no longer maintained.
|
||||
|
||||
Instead, use the Grafana Image Renderer remote rendering service.
|
||||
{{< /admonition >}}
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
All PhantomJS support has been removed. Instead, use the Grafana Image Renderer plugin or remote rendering service.
|
||||
{{< /admonition >}}
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
---
|
||||
description: Guide for upgrading to Grafana v12.2
|
||||
keywords:
|
||||
- grafana
|
||||
- configuration
|
||||
- documentation
|
||||
- upgrade
|
||||
- '12.2'
|
||||
title: Upgrade to Grafana v12.2
|
||||
menuTitle: Upgrade to v12.2
|
||||
weight: 498
|
||||
---
|
||||
|
||||
# Upgrade to Grafana v12.2
|
||||
|
||||
{{< docs/shared lookup="upgrade/intro_2.md" source="grafana" version="<GRAFANA_VERSION>" >}}
|
||||
|
||||
{{< docs/shared lookup="back-up/back-up-grafana.md" source="grafana" version="<GRAFANA_VERSION>" leveloffset="+1" >}}
|
||||
|
||||
{{< docs/shared lookup="upgrade/upgrade-common-tasks.md" source="grafana" version="<GRAFANA_VERSION>" >}}
|
||||
@@ -192,6 +192,7 @@ For a complete list of every change, with links to pull requests and related iss
|
||||
|
||||
## Grafana 12
|
||||
|
||||
- [What's new in 12.2](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/whatsnew/whats-new-in-v12-2)
|
||||
- [What's new in 12.1](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/whatsnew/whats-new-in-v12-1)
|
||||
- [What's new in 12.0](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/whatsnew/whats-new-in-v12-0)
|
||||
|
||||
|
||||
@@ -0,0 +1,72 @@
|
||||
---
|
||||
description: Feature and improvement highlights for Grafana v12.2
|
||||
keywords:
|
||||
- grafana
|
||||
- new
|
||||
- documentation
|
||||
- '12.2'
|
||||
- release notes
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
title: What's new in Grafana v12.2
|
||||
posts:
|
||||
- title: SQL expressions
|
||||
items:
|
||||
- whats-new/2025-09-05-sql-expressions.md
|
||||
- title: Dashboards and visualizations
|
||||
items:
|
||||
- whats-new/2025-08-22-new-table-visualization-is-generally-available.md
|
||||
- whats-new/2025-08-27-generate-tooltips-from-table-fields.md
|
||||
- whats-new/2025-08-27-improved-footer-for-table-visualization.md
|
||||
- whats-new/2025-07-17-disable-tooltips-in-canvas-visualizations.md
|
||||
- whats-new/2025-07-14-static-options-for-query-variable.md
|
||||
- whats-new/2025-07-24-dynamic-connection-direction-in-canvas.md
|
||||
- whats-new/2025-08-04-canvas-pan-zoom-improvements.md
|
||||
- whats-new/2025-09-01-actions-authentication-via-infinity-datasource.md
|
||||
- whats-new/2025-09-02-enhanced-ad-hoc-filter-support.md
|
||||
- whats-new/2025-09-02-new-dashboard-apis-now-enabled-by-default.md
|
||||
- title: Reporting
|
||||
items:
|
||||
- whats-new/2025-05-27-new-and-improved-reporting.md
|
||||
- title: Data sources
|
||||
items:
|
||||
- whats-new/2025-08-12-jenkins-enterprise-data-source-for-grafana.md
|
||||
- whats-new/2025-07-16-google-sheets-data-source-now-supports-template-variables.md
|
||||
- whats-new/2025-09-04-azure-monitor-resource-picker-filtering-and-recent-resources.md
|
||||
- title: Explore
|
||||
items:
|
||||
- whats-new/2025-07-08-saved-queries-in-dashboards-and-explore.md
|
||||
- title: Logs Drilldown
|
||||
items:
|
||||
- whats-new/2025-08-29-json-log-line-viewer-in-logs-drilldown-is-now-generally-available.md
|
||||
- title: Metrics Drilldown
|
||||
items:
|
||||
- whats-new/2025-08-07-grafana-metrics-drilldown-entry-point-from-alerting-rule.md
|
||||
- title: Plugins
|
||||
items:
|
||||
- whats-new/2025-09-11-translate-your-plugin.md
|
||||
- title: Authentication and authorization
|
||||
items:
|
||||
- whats-new/2025-09-10-scim-configuration-ui.md
|
||||
whats_new_grafana_version: 12.2
|
||||
weight: -51
|
||||
---
|
||||
|
||||
# What’s new in Grafana v12.2
|
||||
|
||||
Welcome to Grafana 12.2! This release focuses on making it easier to gain insights from your data.
|
||||
|
||||
We're excited to announce several features are now GA. Enhanced ad hoc filtering transforms your dashboards into true command centers, allowing you to slice and dice datasets on the fly. The redesigned table visualization offers improved performance and visual aids for quick pattern and anomaly identification, helping you make faster decisions. The Logs Drilldown JSON viewer makes intimidating log structures organized and explorable. Metrics Drilldown now integrates with alert creation in Grafana, so you can explore Prometheus data with intuitive point-and-click interactions, find the right visualization, and easily use its query in your alert rule.
|
||||
|
||||
We're also collecting feedback on some new public preview features. AI-powered SQL expressions eliminate the barrier between questions and answers by generating SQL queries from natural language and providing instant explanations for existing queries. Our enhanced Canvas Pan and Zoom experience lets you design complex dashboards exactly as you envision them.
|
||||
|
||||
Keep reading to learn more about everything 12.2 has in store.
|
||||
|
||||
{{< youtube id=-7A_tePidEM >}}
|
||||
|
||||
For even more detail about all the changes in this release, refer to the [changelog](https://github.com/grafana/grafana/blob/main/CHANGELOG.md). For the specific steps we recommend when you upgrade to v12.2, check out our [Upgrade Guide](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/upgrade-guide/upgrade-v12.2/).
|
||||
|
||||
{{< docs/whats-new >}}
|
||||
@@ -11,7 +11,7 @@ const waitForTableLoad = async (loc: Page | Locator) => {
|
||||
};
|
||||
|
||||
test.describe('Panels test: Table - Footer', { tag: ['@panels', '@table'] }, () => {
|
||||
test('Footer unaffected by filtering', async ({ gotoDashboardPage, selectors, page }) => {
|
||||
test('Footer affected by filtering', async ({ gotoDashboardPage, selectors, page }) => {
|
||||
const dashboardPage = await gotoDashboardPage({
|
||||
uid: DASHBOARD_UID,
|
||||
queryParams: new URLSearchParams({ editPanel: '4' }),
|
||||
@@ -51,7 +51,7 @@ test.describe('Panels test: Table - Footer', { tag: ['@panels', '@table'] }, ()
|
||||
dashboardPage
|
||||
.getByGrafanaSelector(selectors.components.Panels.Visualization.TableNG.Footer.Value)
|
||||
.nth(minColumnIdx)
|
||||
).toHaveText(minReducerValue);
|
||||
).not.toHaveText(minReducerValue);
|
||||
});
|
||||
|
||||
test('Footer unaffected by sorting', async ({ gotoDashboardPage, selectors, page }) => {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@test-plugins/extensions-test-app",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"build": "NODE_OPTIONS='--experimental-strip-types --no-warnings=ExperimentalWarning' webpack -c ./webpack.config.ts --env production",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@test-plugins/grafana-e2etest-datasource",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"build": "NODE_OPTIONS='--experimental-strip-types --no-warnings=ExperimentalWarning' webpack -c ./webpack.config.ts --env production",
|
||||
|
||||
@@ -85,5 +85,63 @@ test.describe(
|
||||
expectedRange = 'Time range selected: 2024-06-05 10:04:00 to 2024-06-05 10:05:00'; // 1 min back
|
||||
await expect(timePickerButton).toHaveAttribute('aria-label', expectedRange);
|
||||
});
|
||||
|
||||
test('ctrl+o should toggle shared crosshair', async ({ page, selectors }) => {
|
||||
// Navigate to a new dashboard
|
||||
await page.goto('/dashboard/new?orgId=1');
|
||||
|
||||
// Wait for dashboard to load
|
||||
await page.waitForLoadState('networkidle');
|
||||
|
||||
// Wait for dashboard to be fully initialized by checking for dashboard content
|
||||
await page
|
||||
.locator('[data-testid*="dashboard"]')
|
||||
.or(page.locator('text=Start your new dashboard'))
|
||||
.first()
|
||||
.waitFor({ state: 'visible' });
|
||||
|
||||
// Test the keyboard shortcut first in the main dashboard view
|
||||
const currentUrl = page.url();
|
||||
const modKey = process.platform === 'darwin' ? 'Meta' : 'Control';
|
||||
|
||||
// Test that mod+o works in the main dashboard (should not trigger file dialog)
|
||||
console.log('Testing mod+o in main dashboard view...');
|
||||
await page.keyboard.press(`${modKey}+o`);
|
||||
expect(page.url()).toBe(currentUrl); // Should not navigate away
|
||||
|
||||
// Now open settings to check if the state actually changed
|
||||
await page.keyboard.press('d');
|
||||
await page.keyboard.press('s');
|
||||
|
||||
// Wait for settings page to load by checking for the General tab or settings content
|
||||
await page
|
||||
.locator('text=General')
|
||||
.or(page.locator('[data-testid*="dashboard-settings"]'))
|
||||
.waitFor({ state: 'visible' });
|
||||
|
||||
// Wait for Panel options section to be visible and scroll to it
|
||||
const panelOptionsSection = page.locator('text=Panel options');
|
||||
await panelOptionsSection.waitFor({ state: 'visible' });
|
||||
await panelOptionsSection.scrollIntoViewIfNeeded();
|
||||
|
||||
// Wait for radio buttons to be visible
|
||||
await page
|
||||
.locator('[role="radiogroup"]')
|
||||
.last()
|
||||
.locator('input[type="radio"]')
|
||||
.first()
|
||||
.waitFor({ state: 'visible' });
|
||||
|
||||
// Check current state - after one mod+o press, it should be crosshair (1)
|
||||
await expect(page.locator('[role="radiogroup"]').last().locator('input[type="radio"]').nth(1)).toBeChecked(); // Shared crosshair
|
||||
|
||||
// Test second press in the main dashboard view (should go to tooltip)
|
||||
await page.keyboard.press(`${modKey}+o`);
|
||||
await expect(page.locator('[role="radiogroup"]').last().locator('input[type="radio"]').nth(2)).toBeChecked(); // Shared tooltip
|
||||
|
||||
// Test third press in the main dashboard view (should go back to default)
|
||||
await page.keyboard.press(`${modKey}+o`);
|
||||
await expect(page.locator('[role="radiogroup"]').last().locator('input[type="radio"]').nth(0)).toBeChecked(); // Default
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
@@ -996,6 +996,7 @@ github.com/grafana/cog v0.0.38/go.mod h1:UDstzYqMdgIROmbfkHL8fB9XWQO2lnf5z+4W/eJ
|
||||
github.com/grafana/go-gelf/v2 v2.0.1 h1:BOChP0h/jLeD+7F9mL7tq10xVkDG15he3T1zHuQaWak=
|
||||
github.com/grafana/go-gelf/v2 v2.0.1/go.mod h1:lexHie0xzYGwCgiRGcvZ723bSNyNI8ZRD4s0CLobh90=
|
||||
github.com/grafana/gomemcache v0.0.0-20250228145437-da7b95fd2ac1/go.mod h1:j/s0jkda4UXTemDs7Pgw/vMT06alWc42CHisvYac0qw=
|
||||
github.com/grafana/gomemcache v0.0.0-20250828162811-a96f6acee2fe/go.mod h1:j/s0jkda4UXTemDs7Pgw/vMT06alWc42CHisvYac0qw=
|
||||
github.com/grafana/grafana-app-sdk v0.40.1/go.mod h1:4P8h7VB6KcDjX9bAoBQc6IP8iNylxe6bSXLR9gA39gM=
|
||||
github.com/grafana/grafana-app-sdk v0.41.0 h1:SYHN3U7B1myRKY3UZZDkFsue9TDmAOap0UrQVTqtYBU=
|
||||
github.com/grafana/grafana-app-sdk v0.41.0/go.mod h1:Wg/3vEZfok1hhIWiHaaJm+FwkosfO98o8KbeLFEnZpY=
|
||||
|
||||
+1
-1
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
|
||||
"npmClient": "yarn",
|
||||
"version": "12.2.0-pre"
|
||||
"version": "12.2.1"
|
||||
}
|
||||
|
||||
+1
-1
@@ -3,7 +3,7 @@
|
||||
"license": "AGPL-3.0-only",
|
||||
"private": true,
|
||||
"name": "grafana",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"repository": "github:grafana/grafana",
|
||||
"scripts": {
|
||||
"predev": "./scripts/check-frontend-dev.sh",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/alerting",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"description": "Grafana Alerting Library – Build vertical integrations on top of the industry-leading alerting solution",
|
||||
"keywords": [
|
||||
"typescript",
|
||||
@@ -14,7 +14,7 @@
|
||||
"sideEffects": false,
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+http://github.com/grafana/grafana.git",
|
||||
"url": "http://github.com/grafana/grafana.git",
|
||||
"directory": "packages/grafana-alerting"
|
||||
},
|
||||
"main": "src/index.ts",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/data",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"description": "Grafana Data Library",
|
||||
"keywords": [
|
||||
"typescript"
|
||||
@@ -56,8 +56,8 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@braintree/sanitize-url": "7.0.1",
|
||||
"@grafana/i18n": "12.2.0-pre",
|
||||
"@grafana/schema": "12.2.0-pre",
|
||||
"@grafana/i18n": "12.2.1",
|
||||
"@grafana/schema": "12.2.1",
|
||||
"@leeoniya/ufuzzy": "1.0.18",
|
||||
"@types/d3-interpolate": "^3.0.0",
|
||||
"@types/string-hash": "1.1.3",
|
||||
|
||||
@@ -167,9 +167,9 @@ describe('validatePath', () => {
|
||||
expect(validatePath(urlWithDots)).toBe(urlWithDots);
|
||||
});
|
||||
|
||||
it('should allow query parameters that contain dots', () => {
|
||||
const urlWithDotsInQuery = 'https://api.example.com/search?version=1.2.3&file=../config';
|
||||
expect(validatePath(urlWithDotsInQuery)).toBe(urlWithDotsInQuery);
|
||||
it('should block query parameters that contain path traversal', () => {
|
||||
const urlWithTraversalInQuery = 'https://api.example.com/search?version=1.2.3&file=../config';
|
||||
expect(() => validatePath(urlWithTraversalInQuery)).toThrow(PathValidationError);
|
||||
});
|
||||
|
||||
it('should handle malformed URLs gracefully', () => {
|
||||
|
||||
@@ -146,27 +146,25 @@ export class PathValidationError extends Error {
|
||||
*/
|
||||
export function validatePath<OriginalPath extends string>(path: OriginalPath): OriginalPath {
|
||||
try {
|
||||
let originalDecoded: string = path; // down-cast to a string to indicate this can't be returned
|
||||
let decoded: string = path;
|
||||
while (true) {
|
||||
const nextDecode = decodeURIComponent(originalDecoded);
|
||||
if (nextDecode === originalDecoded) {
|
||||
const nextDecode = decodeURIComponent(decoded);
|
||||
if (nextDecode === decoded) {
|
||||
break; // String is fully decoded.
|
||||
}
|
||||
originalDecoded = nextDecode;
|
||||
decoded = nextDecode;
|
||||
}
|
||||
|
||||
// Remove query params and fragments to check only the path portion
|
||||
const cleaned = originalDecoded.split(/[\?#]/)[0];
|
||||
originalDecoded = cleaned;
|
||||
|
||||
// If the original string contains traversal attempts, block it
|
||||
if (/\.\.|\/\\|[\t\n\r]/.test(originalDecoded)) {
|
||||
// Validate the entire decoded string for traversal attempts
|
||||
// This prevents attacks that use query separators to hide traversal payloads
|
||||
if (/\.\.|\/\\|[\t\n\r]/.test(decoded)) {
|
||||
throw new PathValidationError();
|
||||
}
|
||||
|
||||
// Return the original path (not the decoded version) to preserve the full URL
|
||||
return path;
|
||||
} catch (err) {
|
||||
// Rethrow the original InvalidPathError to preserve the stack trace
|
||||
// Rethrow the original PathValidationError to preserve the stack trace
|
||||
if (err instanceof PathValidationError) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/e2e-selectors",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"description": "Grafana End-to-End Test Selectors Library",
|
||||
"keywords": [
|
||||
"cli",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@grafana/eslint-plugin",
|
||||
"description": "ESLint rules for use within the Grafana repo. Not suitable (or supported) for external use.",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"main": "./index.cjs",
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/flamegraph",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"description": "Grafana flamegraph visualization component",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -44,8 +44,8 @@
|
||||
],
|
||||
"dependencies": {
|
||||
"@emotion/css": "11.13.5",
|
||||
"@grafana/data": "12.2.0-pre",
|
||||
"@grafana/ui": "12.2.0-pre",
|
||||
"@grafana/data": "12.2.1",
|
||||
"@grafana/ui": "12.2.1",
|
||||
"@leeoniya/ufuzzy": "1.0.18",
|
||||
"d3": "^7.8.5",
|
||||
"lodash": "4.17.21",
|
||||
|
||||
@@ -5,9 +5,10 @@ import { createDataFrame } from '@grafana/data';
|
||||
|
||||
import { FlameGraphDataContainer } from '../FlameGraph/dataTransform';
|
||||
import { data } from '../FlameGraph/testData/dataNestedSet';
|
||||
import { textToDataContainer } from '../FlameGraph/testHelpers';
|
||||
import { ColorScheme } from '../types';
|
||||
|
||||
import FlameGraphTopTableContainer from './FlameGraphTopTableContainer';
|
||||
import FlameGraphTopTableContainer, { buildFilteredTable } from './FlameGraphTopTableContainer';
|
||||
|
||||
describe('FlameGraphTopTableContainer', () => {
|
||||
const setup = () => {
|
||||
@@ -52,7 +53,10 @@ describe('FlameGraphTopTableContainer', () => {
|
||||
expect(cells).toHaveLength(60); // 16 rows
|
||||
expect(cells[1].textContent).toEqual('net/http.HandlerFunc.ServeHTTP');
|
||||
expect(cells[2].textContent).toEqual('31.7 K');
|
||||
expect(cells[3].textContent).toEqual('31.7 Bil');
|
||||
expect(cells[3].textContent).toEqual('5.58 Bil');
|
||||
expect(cells[5].textContent).toEqual('total');
|
||||
expect(cells[6].textContent).toEqual('16.5 K');
|
||||
expect(cells[7].textContent).toEqual('16.5 Bil');
|
||||
expect(cells[25].textContent).toEqual('net/http.(*conn).serve');
|
||||
expect(cells[26].textContent).toEqual('5.63 K');
|
||||
expect(cells[27].textContent).toEqual('5.63 Bil');
|
||||
@@ -83,3 +87,111 @@ describe('FlameGraphTopTableContainer', () => {
|
||||
expect(mocks.onSandwich).toHaveBeenCalledWith('net/http.HandlerFunc.ServeHTTP');
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildFilteredTable', () => {
|
||||
it('should group data by label and sum values', () => {
|
||||
const container = textToDataContainer(`
|
||||
[0////]
|
||||
[1][2]
|
||||
[3][4]
|
||||
`);
|
||||
|
||||
const result = buildFilteredTable(container!);
|
||||
|
||||
expect(result).toEqual({
|
||||
'0': { self: 1, total: 7, totalRight: 0 },
|
||||
'1': { self: 0, total: 3, totalRight: 0 },
|
||||
'2': { self: 0, total: 3, totalRight: 0 },
|
||||
'3': { self: 3, total: 3, totalRight: 0 },
|
||||
'4': { self: 3, total: 3, totalRight: 0 },
|
||||
});
|
||||
});
|
||||
|
||||
it('should sum values for duplicate labels', () => {
|
||||
const container = textToDataContainer(`
|
||||
[0///]
|
||||
[1][1]
|
||||
`);
|
||||
|
||||
const result = buildFilteredTable(container!);
|
||||
|
||||
expect(result).toEqual({
|
||||
'0': { self: 0, total: 6, totalRight: 0 },
|
||||
'1': { self: 6, total: 6, totalRight: 0 },
|
||||
});
|
||||
});
|
||||
|
||||
it('should filter by matchedLabels when provided', () => {
|
||||
const container = textToDataContainer(`
|
||||
[0////]
|
||||
[1][2]
|
||||
[3][4]
|
||||
`);
|
||||
|
||||
const matchedLabels = new Set(['1', '3']);
|
||||
const result = buildFilteredTable(container!, matchedLabels);
|
||||
|
||||
expect(result).toEqual({
|
||||
'1': { self: 0, total: 3, totalRight: 0 },
|
||||
'3': { self: 3, total: 3, totalRight: 0 },
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle empty matchedLabels set', () => {
|
||||
const container = textToDataContainer(`
|
||||
[0////]
|
||||
[1][2]
|
||||
[3][4]
|
||||
`);
|
||||
|
||||
const matchedLabels = new Set<string>();
|
||||
const result = buildFilteredTable(container!, matchedLabels);
|
||||
|
||||
expect(result).toEqual({});
|
||||
});
|
||||
|
||||
it('should handle data with no matches', () => {
|
||||
const container = textToDataContainer(`
|
||||
[0////]
|
||||
[1][2]
|
||||
[3][4]
|
||||
`);
|
||||
|
||||
const matchedLabels = new Set(['9']);
|
||||
const result = buildFilteredTable(container!, matchedLabels);
|
||||
|
||||
expect(result).toEqual({});
|
||||
});
|
||||
|
||||
it('should work without matchedLabels filter', () => {
|
||||
const container = textToDataContainer(`
|
||||
[0]
|
||||
[1]
|
||||
`);
|
||||
|
||||
const result = buildFilteredTable(container!);
|
||||
|
||||
expect(result).toEqual({
|
||||
'0': { self: 0, total: 3, totalRight: 0 },
|
||||
'1': { self: 3, total: 3, totalRight: 0 },
|
||||
});
|
||||
});
|
||||
it('should not inflate totals for recursive calls', () => {
|
||||
const container = textToDataContainer(`
|
||||
[0////]
|
||||
[1][2]
|
||||
[3][4]
|
||||
[0]
|
||||
`);
|
||||
|
||||
const result = buildFilteredTable(container!);
|
||||
|
||||
expect(result).toEqual({
|
||||
'0': { self: 4, total: 7, totalRight: 0 },
|
||||
'1': { self: 0, total: 3, totalRight: 0 },
|
||||
'2': { self: 0, total: 3, totalRight: 0 },
|
||||
'3': { self: 0, total: 3, totalRight: 0 },
|
||||
'4': { self: 3, total: 3, totalRight: 0 },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -53,28 +53,7 @@ const FlameGraphTopTableContainer = memo(
|
||||
onTableSort,
|
||||
colorScheme,
|
||||
}: Props) => {
|
||||
const table = useMemo(() => {
|
||||
// Group the data by label, we show only one row per label and sum the values
|
||||
// TODO: should be by filename + funcName + linenumber?
|
||||
let filteredTable: { [key: string]: TableData } = Object.create(null);
|
||||
for (let i = 0; i < data.data.length; i++) {
|
||||
const value = data.getValue(i);
|
||||
const valueRight = data.getValueRight(i);
|
||||
const self = data.getSelf(i);
|
||||
const label = data.getLabel(i);
|
||||
|
||||
// If user is doing text search we filter out labels in the same way we highlight them in flame graph.
|
||||
if (!matchedLabels || matchedLabels.has(label)) {
|
||||
filteredTable[label] = filteredTable[label] || {};
|
||||
filteredTable[label].self = filteredTable[label].self ? filteredTable[label].self + self : self;
|
||||
filteredTable[label].total = filteredTable[label].total ? filteredTable[label].total + value : value;
|
||||
filteredTable[label].totalRight = filteredTable[label].totalRight
|
||||
? filteredTable[label].totalRight + valueRight
|
||||
: valueRight;
|
||||
}
|
||||
}
|
||||
return filteredTable;
|
||||
}, [data, matchedLabels]);
|
||||
const table = useMemo(() => buildFilteredTable(data, matchedLabels), [data, matchedLabels]);
|
||||
|
||||
const styles = useStyles2(getStyles);
|
||||
const theme = useTheme2();
|
||||
@@ -124,6 +103,49 @@ const FlameGraphTopTableContainer = memo(
|
||||
|
||||
FlameGraphTopTableContainer.displayName = 'FlameGraphTopTableContainer';
|
||||
|
||||
function buildFilteredTable(data: FlameGraphDataContainer, matchedLabels?: Set<string>) {
|
||||
// Group the data by label, we show only one row per label and sum the values
|
||||
// TODO: should be by filename + funcName + linenumber?
|
||||
let filteredTable: { [key: string]: TableData } = Object.create(null);
|
||||
|
||||
// Track call stack to detect recursive calls
|
||||
const callStack: string[] = [];
|
||||
|
||||
for (let i = 0; i < data.data.length; i++) {
|
||||
const value = data.getValue(i);
|
||||
const valueRight = data.getValueRight(i);
|
||||
const self = data.getSelf(i);
|
||||
const label = data.getLabel(i);
|
||||
const level = data.getLevel(i);
|
||||
|
||||
// Maintain call stack based on level changes
|
||||
while (callStack.length > level) {
|
||||
callStack.pop();
|
||||
}
|
||||
|
||||
// Check if this is a recursive call (same label already in call stack)
|
||||
const isRecursive = callStack.some((entry) => entry === label);
|
||||
|
||||
// If user is doing text search we filter out labels in the same way we highlight them in flame graph.
|
||||
if (!matchedLabels || matchedLabels.has(label)) {
|
||||
filteredTable[label] = filteredTable[label] || {};
|
||||
filteredTable[label].self = filteredTable[label].self ? filteredTable[label].self + self : self;
|
||||
|
||||
// Only add to total if this is not a recursive call
|
||||
if (!isRecursive) {
|
||||
filteredTable[label].total = filteredTable[label].total ? filteredTable[label].total + value : value;
|
||||
filteredTable[label].totalRight = filteredTable[label].totalRight
|
||||
? filteredTable[label].totalRight + valueRight
|
||||
: valueRight;
|
||||
}
|
||||
}
|
||||
|
||||
// Add current call to the stack
|
||||
callStack.push(label);
|
||||
}
|
||||
return filteredTable;
|
||||
}
|
||||
|
||||
function buildTableDataFrame(
|
||||
data: FlameGraphDataContainer,
|
||||
table: { [key: string]: TableData },
|
||||
@@ -365,4 +387,6 @@ const getStylesActionCell = () => {
|
||||
};
|
||||
};
|
||||
|
||||
export { buildFilteredTable };
|
||||
|
||||
export default FlameGraphTopTableContainer;
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/i18n",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"description": "Grafana Internationalization Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"license": "AGPL-3.0-only",
|
||||
"name": "@grafana/o11y-ds-frontend",
|
||||
"private": true,
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"description": "Library to manage traces in Grafana.",
|
||||
"sideEffects": false,
|
||||
"repository": {
|
||||
@@ -18,12 +18,12 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/css": "11.13.5",
|
||||
"@grafana/data": "12.2.0-pre",
|
||||
"@grafana/e2e-selectors": "12.2.0-pre",
|
||||
"@grafana/data": "12.2.1",
|
||||
"@grafana/e2e-selectors": "12.2.1",
|
||||
"@grafana/plugin-ui": "^0.10.10",
|
||||
"@grafana/runtime": "12.2.0-pre",
|
||||
"@grafana/schema": "12.2.0-pre",
|
||||
"@grafana/ui": "12.2.0-pre",
|
||||
"@grafana/runtime": "12.2.1",
|
||||
"@grafana/schema": "12.2.1",
|
||||
"@grafana/ui": "12.2.1",
|
||||
"react-select": "5.10.2",
|
||||
"react-use": "17.6.0",
|
||||
"rxjs": "7.8.2",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "@grafana/plugin-configs",
|
||||
"description": "Shared dependencies and files for core plugins",
|
||||
"private": true,
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"dependencies": {
|
||||
"tslib": "2.8.1"
|
||||
},
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "AGPL-3.0-only",
|
||||
"name": "@grafana/prometheus",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"description": "Grafana Prometheus Library",
|
||||
"keywords": [
|
||||
"typescript",
|
||||
@@ -41,13 +41,13 @@
|
||||
"dependencies": {
|
||||
"@emotion/css": "11.13.5",
|
||||
"@floating-ui/react": "0.27.16",
|
||||
"@grafana/data": "12.2.0-pre",
|
||||
"@grafana/e2e-selectors": "12.2.0-pre",
|
||||
"@grafana/i18n": "12.2.0-pre",
|
||||
"@grafana/data": "12.2.1",
|
||||
"@grafana/e2e-selectors": "12.2.1",
|
||||
"@grafana/i18n": "12.2.1",
|
||||
"@grafana/plugin-ui": "^0.10.10",
|
||||
"@grafana/runtime": "12.2.0-pre",
|
||||
"@grafana/schema": "12.2.0-pre",
|
||||
"@grafana/ui": "12.2.0-pre",
|
||||
"@grafana/runtime": "12.2.1",
|
||||
"@grafana/schema": "12.2.1",
|
||||
"@grafana/ui": "12.2.1",
|
||||
"@hello-pangea/dnd": "18.0.1",
|
||||
"@leeoniya/ufuzzy": "1.0.18",
|
||||
"@lezer/common": "1.2.3",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/runtime",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"description": "Grafana Runtime Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -53,11 +53,11 @@
|
||||
"postpack": "mv package.json.bak package.json && rimraf ./unstable"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grafana/data": "12.2.0-pre",
|
||||
"@grafana/e2e-selectors": "12.2.0-pre",
|
||||
"@grafana/data": "12.2.1",
|
||||
"@grafana/e2e-selectors": "12.2.1",
|
||||
"@grafana/faro-web-sdk": "^1.13.2",
|
||||
"@grafana/schema": "12.2.0-pre",
|
||||
"@grafana/ui": "12.2.0-pre",
|
||||
"@grafana/schema": "12.2.1",
|
||||
"@grafana/ui": "12.2.1",
|
||||
"@types/systemjs": "6.15.3",
|
||||
"history": "4.10.1",
|
||||
"lodash": "4.17.21",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/schema",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"description": "Grafana Schema Library",
|
||||
"keywords": [
|
||||
"typescript"
|
||||
|
||||
+1
-1
@@ -8,7 +8,7 @@
|
||||
//
|
||||
// Run 'make gen-cue' from repository root to regenerate.
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options {
|
||||
limit: number;
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options extends common.OptionsWithLegend, common.OptionsWithTooltip, common.OptionsWithTextFormatting {
|
||||
/**
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options extends common.OptionsWithLegend, common.SingleStatBaseOptions {
|
||||
displayMode: common.BarGaugeDisplayMode;
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export enum VizDisplayMode {
|
||||
Candles = 'candles',
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as ui from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export enum HorizontalConstraint {
|
||||
Center = 'center',
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface MetricStat {
|
||||
/**
|
||||
|
||||
+1
-1
@@ -8,7 +8,7 @@
|
||||
//
|
||||
// Run 'make gen-cue' from repository root to regenerate.
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options {
|
||||
/**
|
||||
|
||||
+1
-1
@@ -8,7 +8,7 @@
|
||||
//
|
||||
// Run 'make gen-cue' from repository root to regenerate.
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options {
|
||||
selectedSeries: number;
|
||||
|
||||
+1
-1
@@ -8,7 +8,7 @@
|
||||
//
|
||||
// Run 'make gen-cue' from repository root to regenerate.
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export type UpdateConfig = {
|
||||
render: boolean,
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export type BucketAggregation = (DateHistogram | Histogram | Terms | Filters | GeoHashGrid | Nested);
|
||||
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options extends common.SingleStatBaseOptions {
|
||||
minVizHeight: number;
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as ui from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options {
|
||||
basemap: ui.MapLayerOptions;
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as ui from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
/**
|
||||
* Controls the color mode of the heatmap
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options extends common.OptionsWithLegend, common.OptionsWithTooltip {
|
||||
/**
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options {
|
||||
controlsStorageKey?: string;
|
||||
|
||||
+1
-1
@@ -8,7 +8,7 @@
|
||||
//
|
||||
// Run 'make gen-cue' from repository root to regenerate.
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options {
|
||||
/**
|
||||
|
||||
+1
-1
@@ -8,7 +8,7 @@
|
||||
//
|
||||
// Run 'make gen-cue' from repository root to regenerate.
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface ArcOption {
|
||||
/**
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
/**
|
||||
* Select the pie chart display style.
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options extends common.SingleStatBaseOptions {
|
||||
colorMode: common.BigValueColorMode;
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as ui from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options extends ui.OptionsWithLegend, ui.OptionsWithTooltip, ui.OptionsWithTimezones {
|
||||
/**
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as ui from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options extends ui.OptionsWithLegend, ui.OptionsWithTooltip, ui.OptionsWithTimezones {
|
||||
/**
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as ui from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options {
|
||||
/**
|
||||
|
||||
+1
-1
@@ -8,7 +8,7 @@
|
||||
//
|
||||
// Run 'make gen-cue' from repository root to regenerate.
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export enum TextMode {
|
||||
Code = 'code',
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export interface Options extends common.OptionsWithTimezones {
|
||||
legend: common.VizLegendOptions;
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
/**
|
||||
* Identical to timeseries... except it does not have timezone settings
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "12.2.0-pre";
|
||||
export const pluginVersion = "12.2.1";
|
||||
|
||||
export enum PointShape {
|
||||
Circle = 'circle',
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"license": "AGPL-3.0-only",
|
||||
"private": true,
|
||||
"name": "@grafana/sql",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "http://github.com/grafana/grafana.git",
|
||||
@@ -16,12 +16,12 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/css": "11.13.5",
|
||||
"@grafana/data": "12.2.0-pre",
|
||||
"@grafana/e2e-selectors": "12.2.0-pre",
|
||||
"@grafana/i18n": "12.2.0-pre",
|
||||
"@grafana/data": "12.2.1",
|
||||
"@grafana/e2e-selectors": "12.2.1",
|
||||
"@grafana/i18n": "12.2.1",
|
||||
"@grafana/plugin-ui": "^0.10.10",
|
||||
"@grafana/runtime": "12.2.0-pre",
|
||||
"@grafana/ui": "12.2.0-pre",
|
||||
"@grafana/runtime": "12.2.1",
|
||||
"@grafana/ui": "12.2.1",
|
||||
"@react-awesome-query-builder/ui": "6.6.15",
|
||||
"immutable": "5.1.3",
|
||||
"lodash": "4.17.21",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/test-utils",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"private": true,
|
||||
"description": "Grafana test utils & Mock API",
|
||||
"keywords": [
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/ui",
|
||||
"version": "12.2.0-pre",
|
||||
"version": "12.2.1",
|
||||
"description": "Grafana Components Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -67,11 +67,11 @@
|
||||
"@emotion/react": "11.14.0",
|
||||
"@emotion/serialize": "1.3.3",
|
||||
"@floating-ui/react": "0.27.16",
|
||||
"@grafana/data": "12.2.0-pre",
|
||||
"@grafana/e2e-selectors": "12.2.0-pre",
|
||||
"@grafana/data": "12.2.1",
|
||||
"@grafana/e2e-selectors": "12.2.1",
|
||||
"@grafana/faro-web-sdk": "^1.13.2",
|
||||
"@grafana/i18n": "12.2.0-pre",
|
||||
"@grafana/schema": "12.2.0-pre",
|
||||
"@grafana/i18n": "12.2.1",
|
||||
"@grafana/schema": "12.2.1",
|
||||
"@hello-pangea/dnd": "18.0.1",
|
||||
"@monaco-editor/react": "4.7.0",
|
||||
"@popperjs/core": "2.11.8",
|
||||
|
||||
@@ -108,7 +108,6 @@ export function TableNG(props: TableNGProps) {
|
||||
enablePagination = false,
|
||||
enableSharedCrosshair = false,
|
||||
enableVirtualization,
|
||||
fieldConfig,
|
||||
frozenColumns = 0,
|
||||
getActions = () => [],
|
||||
height,
|
||||
@@ -125,12 +124,6 @@ export function TableNG(props: TableNGProps) {
|
||||
width,
|
||||
} = props;
|
||||
|
||||
const hasFooter = useMemo(
|
||||
() => data.fields.some((field) => field.config?.custom?.footer?.reducers?.length ?? false),
|
||||
[data.fields]
|
||||
);
|
||||
const footerHeight = hasFooter ? calculateFooterHeight(data, fieldConfig) : 0;
|
||||
|
||||
const theme = useTheme2();
|
||||
const styles = useStyles2(getGridStyles, enablePagination, transparent);
|
||||
const panelContext = usePanelContext();
|
||||
@@ -146,7 +139,16 @@ export function TableNG(props: TableNGProps) {
|
||||
[getActions, data, userCanExecuteActions]
|
||||
);
|
||||
|
||||
const visibleFields = useMemo(() => getVisibleFields(data.fields), [data.fields]);
|
||||
const hasHeader = !noHeader;
|
||||
const hasFooter = useMemo(
|
||||
() => visibleFields.some((field) => Boolean(field.config.custom?.footer?.reducers?.length)),
|
||||
[visibleFields]
|
||||
);
|
||||
const footerHeight = useMemo(
|
||||
() => (hasFooter ? calculateFooterHeight(visibleFields) : 0),
|
||||
[hasFooter, visibleFields]
|
||||
);
|
||||
|
||||
const resizeHandler = useColumnResize(onColumnResize);
|
||||
|
||||
@@ -173,7 +175,7 @@ export function TableNG(props: TableNGProps) {
|
||||
const [expandedRows, setExpandedRows] = useState(() => new Set<number>());
|
||||
|
||||
// vt scrollbar accounting for column auto-sizing
|
||||
const visibleFields = useMemo(() => getVisibleFields(data.fields), [data.fields]);
|
||||
|
||||
const defaultRowHeight = useMemo(
|
||||
() => getDefaultRowHeight(theme, visibleFields, cellHeight),
|
||||
[theme, visibleFields, cellHeight]
|
||||
@@ -677,7 +679,7 @@ export function TableNG(props: TableNGProps) {
|
||||
),
|
||||
renderSummaryCell: () => (
|
||||
<SummaryCell
|
||||
rows={rows}
|
||||
rows={sortedRows}
|
||||
footers={footers}
|
||||
field={field}
|
||||
colIdx={i}
|
||||
@@ -707,10 +709,11 @@ export function TableNG(props: TableNGProps) {
|
||||
maxRowHeight,
|
||||
numFrozenColsFullyInView,
|
||||
onCellFilterAdded,
|
||||
rows,
|
||||
rowHeight,
|
||||
rowHeightFn,
|
||||
rows,
|
||||
setFilter,
|
||||
sortedRows,
|
||||
showTypeIcons,
|
||||
theme,
|
||||
timeRange,
|
||||
|
||||
@@ -46,6 +46,7 @@ import {
|
||||
getDefaultRowHeight,
|
||||
getDisplayName,
|
||||
predicateByName,
|
||||
calculateFooterHeight,
|
||||
} from './utils';
|
||||
|
||||
describe('TableNG utils', () => {
|
||||
@@ -1380,6 +1381,35 @@ describe('TableNG utils', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateFooterHeight', () => {
|
||||
it('should return 0 if no footer is present', () => {
|
||||
const frame = createDataFrame({
|
||||
fields: [
|
||||
{ name: 'time', values: [1, 1, 2], nanos: [100, 99, 0] },
|
||||
{ name: 'value', values: [10, 20, 30] },
|
||||
],
|
||||
});
|
||||
|
||||
expect(calculateFooterHeight(frame.fields)).toBe(0);
|
||||
});
|
||||
|
||||
it('should return the height in pixels for the max reducers on a given field', () => {
|
||||
const frame = createDataFrame({
|
||||
fields: [
|
||||
{
|
||||
name: 'time',
|
||||
values: [1, 1, 2],
|
||||
nanos: [100, 99, 0],
|
||||
config: { custom: { footer: { reducers: ['min', 'max', 'count'] } } },
|
||||
},
|
||||
{ name: 'value', values: [10, 20, 30], config: { custom: { footer: { reducers: ['min'] } } } },
|
||||
],
|
||||
});
|
||||
|
||||
expect(calculateFooterHeight(frame.fields)).toBe(78); // 3 reducers * 22px line height + 12px padding
|
||||
});
|
||||
});
|
||||
|
||||
describe('getDisplayName', () => {
|
||||
it('should return the display name if set', () => {
|
||||
const field: Field = {
|
||||
|
||||
@@ -8,7 +8,6 @@ import { Count, varPreLine } from 'uwrap';
|
||||
import {
|
||||
FieldType,
|
||||
Field,
|
||||
FieldConfigSource,
|
||||
formattedValueToString,
|
||||
GrafanaTheme2,
|
||||
DisplayValue,
|
||||
@@ -842,55 +841,18 @@ export const processNestedTableRows = (
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* Get the maximum number of reducers across all fields
|
||||
*/
|
||||
const getMaxReducerCount = (dataFrame: DataFrame, fieldConfig?: FieldConfigSource): number => {
|
||||
// Filter to only numeric fields that can have reducers
|
||||
const numericFields = dataFrame.fields.filter(({ type }) => type === FieldType.number);
|
||||
|
||||
// If there are no numeric fields, return 0
|
||||
if (numericFields.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Map each field to its reducer count (direct config or override)
|
||||
const reducerCounts = numericFields.map((field) => {
|
||||
// Get the direct reducer count from the field config
|
||||
const directReducers = field.config?.custom?.footer?.reducers ?? [];
|
||||
let reducerCount = directReducers.length;
|
||||
|
||||
// Check for overrides if field config is available
|
||||
if (fieldConfig?.overrides) {
|
||||
// Find override that matches this field
|
||||
const override = fieldConfig.overrides.find(
|
||||
({ matcher: { id, options } }) => id === 'byName' && options === getDisplayName(field)
|
||||
);
|
||||
|
||||
// Check if there's a footer reducer property in the override
|
||||
const footerProperty = override?.properties?.find(({ id }) => id === 'custom.footer.reducers');
|
||||
if (footerProperty?.value && Array.isArray(footerProperty.value)) {
|
||||
// If override exists, it takes precedence over direct config
|
||||
reducerCount = footerProperty.value.length;
|
||||
}
|
||||
}
|
||||
|
||||
return reducerCount;
|
||||
});
|
||||
|
||||
// Return the maximum count or 0 if no reducers found
|
||||
return reducerCounts.length > 0 ? Math.max(...reducerCounts) : 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* Calculate the footer height based on the maximum reducer count
|
||||
*/
|
||||
export const calculateFooterHeight = (dataFrame: DataFrame, fieldConfig?: FieldConfigSource) => {
|
||||
const maxReducerCount = getMaxReducerCount(dataFrame, fieldConfig);
|
||||
export const calculateFooterHeight = (fields: Field[]): number => {
|
||||
let maxReducerCount = 0;
|
||||
for (const field of fields) {
|
||||
maxReducerCount = Math.max(maxReducerCount, field.config.custom?.footer?.reducers?.length ?? 0);
|
||||
}
|
||||
|
||||
// Base height (+ padding) + height per reducer
|
||||
return maxReducerCount * TABLE.LINE_HEIGHT + TABLE.CELL_PADDING * 2;
|
||||
return maxReducerCount > 0 ? maxReducerCount * TABLE.LINE_HEIGHT + TABLE.CELL_PADDING * 2 : 0;
|
||||
};
|
||||
|
||||
/**
|
||||
|
||||
@@ -0,0 +1,102 @@
|
||||
package commands
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/cmd/grafana-cli/utils"
|
||||
)
|
||||
|
||||
func TestRemoveCommand_StaticFS_FailsWithImmutableError(t *testing.T) {
|
||||
t.Run("removeCommand fails with immutable error for plugins using StaticFS", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
pluginID := "test-plugin"
|
||||
pluginDir := filepath.Join(tmpDir, pluginID)
|
||||
|
||||
err := os.MkdirAll(pluginDir, 0750)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginJSON := `{
|
||||
"id": "test-plugin",
|
||||
"name": "Test Plugin",
|
||||
"type": "datasource",
|
||||
"info": {
|
||||
"version": "1.0.0"
|
||||
}
|
||||
}`
|
||||
err = os.WriteFile(filepath.Join(pluginDir, "plugin.json"), []byte(pluginJSON), 0644)
|
||||
require.NoError(t, err)
|
||||
|
||||
cmdLine := createCliContextWithArgs(t, []string{pluginID}, "pluginsDir", tmpDir)
|
||||
require.NotNil(t, cmdLine)
|
||||
|
||||
// Verify plugin directory exists before attempting removal
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.NoError(t, err, "Plugin directory should exist before removal attempt")
|
||||
|
||||
err = removeCommand(cmdLine)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify plugin directory has been removed
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.ErrorIs(t, err, os.ErrNotExist)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRemoveCommand_PluginNotFound(t *testing.T) {
|
||||
t.Run("removeCommand should handle missing plugin gracefully", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
cmdLine := createCliContextWithArgs(t, []string{"non-existent-plugin"}, "pluginsDir", tmpDir)
|
||||
require.NotNil(t, cmdLine)
|
||||
|
||||
err := removeCommand(cmdLine)
|
||||
require.NoError(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRemoveCommand_MissingPluginParameter(t *testing.T) {
|
||||
t.Run("removeCommand should error when no plugin ID is provided", func(t *testing.T) {
|
||||
cmdLine := createCliContextWithArgs(t, []string{})
|
||||
require.NotNil(t, cmdLine)
|
||||
|
||||
err := removeCommand(cmdLine)
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), "missing plugin parameter")
|
||||
})
|
||||
}
|
||||
|
||||
// createCliContextWithArgs creates a CLI context with the specified arguments and optional flag key-value pairs.
|
||||
// Usage: createCliContextWithArgs(t, []string{"plugin-id"}, "pluginsDir", "/path/to/plugins", "flag2", "value2")
|
||||
func createCliContextWithArgs(t *testing.T, args []string, flagPairs ...string) *utils.ContextCommandLine {
|
||||
if len(flagPairs)%2 != 0 {
|
||||
t.Fatalf("flagPairs must be provided in key-value pairs, got %d arguments", len(flagPairs))
|
||||
}
|
||||
|
||||
app := &cli.App{
|
||||
Name: "grafana",
|
||||
}
|
||||
|
||||
flagSet := flag.NewFlagSet("test", 0)
|
||||
|
||||
// Add flags from the key-value pairs
|
||||
for i := 0; i < len(flagPairs); i += 2 {
|
||||
key := flagPairs[i]
|
||||
value := flagPairs[i+1]
|
||||
flagSet.String(key, "", "")
|
||||
err := flagSet.Set(key, value)
|
||||
require.NoError(t, err, "Failed to set flag %s=%s", key, value)
|
||||
}
|
||||
|
||||
err := flagSet.Parse(args)
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := cli.NewContext(app, flagSet, nil)
|
||||
return &utils.ContextCommandLine{
|
||||
Context: ctx,
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,150 @@
|
||||
package commands
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/cmd/grafana-cli/models"
|
||||
)
|
||||
|
||||
func TestUpgradeCommand(t *testing.T) {
|
||||
t.Run("Plugin is removed even if upgrade fails", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
pluginID := "test-upgrade-plugin"
|
||||
pluginDir := filepath.Join(tmpDir, pluginID)
|
||||
|
||||
err := os.MkdirAll(pluginDir, 0750)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginJSON := `{
|
||||
"id": "test-upgrade-plugin",
|
||||
"name": "Test Upgrade Plugin",
|
||||
"type": "datasource",
|
||||
"info": {
|
||||
"version": "1.0.0"
|
||||
}
|
||||
}`
|
||||
err = os.WriteFile(filepath.Join(pluginDir, "plugin.json"), []byte(pluginJSON), 0644)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Create a mock HTTP server that returns plugin info with a newer version
|
||||
mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// Handle plugin info request
|
||||
if r.URL.Path == "/repo/"+pluginID {
|
||||
plugin := models.Plugin{
|
||||
ID: pluginID,
|
||||
Versions: []models.Version{
|
||||
{
|
||||
Version: "2.0.0", // Newer than the local version (1.0.0)
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
err = json.NewEncoder(w).Encode(plugin)
|
||||
require.NoError(t, err)
|
||||
return
|
||||
}
|
||||
|
||||
// For any other request (like installation), return 500 to cause the upgrade to fail
|
||||
// after the removal attempt, which is what we want to test
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
_, err = w.Write([]byte("Server error"))
|
||||
require.NoError(t, err)
|
||||
}))
|
||||
defer mockServer.Close()
|
||||
|
||||
// Use our test implementation that properly implements GcomToken()
|
||||
cmdLine := newTestCommandLine([]string{pluginID}, tmpDir, mockServer.URL)
|
||||
|
||||
// Verify plugin directory exists before attempting upgrade
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = upgradeCommand(cmdLine)
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), "API returned invalid status: 500 Internal Server Error")
|
||||
|
||||
// Verify plugin directory was removed during the removal step
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.True(t, os.IsNotExist(err))
|
||||
})
|
||||
}
|
||||
|
||||
func TestUpgradeCommand_PluginNotFound(t *testing.T) {
|
||||
t.Run("upgradeCommand should handle missing plugin gracefully", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
cmdLine := createCliContextWithArgs(t, []string{"non-existent-plugin"}, "pluginsDir", tmpDir)
|
||||
require.NotNil(t, cmdLine)
|
||||
|
||||
err := upgradeCommand(cmdLine)
|
||||
require.Error(t, err)
|
||||
// Should fail trying to find the local plugin
|
||||
require.Contains(t, err.Error(), "could not find plugin non-existent-plugin")
|
||||
})
|
||||
}
|
||||
|
||||
func TestUpgradeCommand_MissingPluginParameter(t *testing.T) {
|
||||
t.Run("upgradeCommand should error when no plugin ID is provided", func(t *testing.T) {
|
||||
cmdLine := createCliContextWithArgs(t, []string{})
|
||||
require.NotNil(t, cmdLine)
|
||||
|
||||
err := upgradeCommand(cmdLine)
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), "please specify plugin to update")
|
||||
})
|
||||
}
|
||||
|
||||
// Simple args implementation
|
||||
type simpleArgs []string
|
||||
|
||||
func (a simpleArgs) First() string {
|
||||
if len(a) > 0 {
|
||||
return a[0]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
func (a simpleArgs) Get(int) string { return "" }
|
||||
func (a simpleArgs) Tail() []string { return nil }
|
||||
func (a simpleArgs) Len() int { return len(a) }
|
||||
func (a simpleArgs) Present() bool { return len(a) > 0 }
|
||||
func (a simpleArgs) Slice() []string { return []string(a) }
|
||||
|
||||
// Base struct with default implementations for unused CommandLine methods
|
||||
type baseCommandLine struct{}
|
||||
|
||||
func (b baseCommandLine) ShowHelp() error { return nil }
|
||||
func (b baseCommandLine) ShowVersion() {}
|
||||
func (b baseCommandLine) Application() *cli.App { return nil }
|
||||
func (b baseCommandLine) Int(_ string) int { return 0 }
|
||||
func (b baseCommandLine) String(_ string) string { return "" }
|
||||
func (b baseCommandLine) StringSlice(_ string) []string { return nil }
|
||||
func (b baseCommandLine) FlagNames() []string { return nil }
|
||||
func (b baseCommandLine) Generic(_ string) any { return nil }
|
||||
func (b baseCommandLine) Bool(_ string) bool { return false }
|
||||
func (b baseCommandLine) PluginURL() string { return "" }
|
||||
func (b baseCommandLine) GcomToken() string { return "" }
|
||||
|
||||
// Test implementation - only implements what we actually need
|
||||
type testCommandLine struct {
|
||||
baseCommandLine // Embedded struct provides default implementations
|
||||
args simpleArgs
|
||||
pluginDir string
|
||||
repoURL string
|
||||
}
|
||||
|
||||
func newTestCommandLine(args []string, pluginDir, repoURL string) *testCommandLine {
|
||||
return &testCommandLine{args: simpleArgs(args), pluginDir: pluginDir, repoURL: repoURL}
|
||||
}
|
||||
|
||||
// Only implement the methods actually used by upgradeCommand
|
||||
func (t *testCommandLine) Args() cli.Args { return t.args }
|
||||
func (t *testCommandLine) PluginDirectory() string { return t.pluginDir }
|
||||
func (t *testCommandLine) PluginRepoURL() string { return t.repoURL }
|
||||
@@ -236,6 +236,15 @@ func (f StaticFS) Files() ([]string, error) {
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func (f StaticFS) Remove() error {
|
||||
if remover, ok := f.FS.(FSRemover); ok {
|
||||
if err := remover.Remove(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// LocalFile implements a fs.File for accessing the local filesystem.
|
||||
type LocalFile struct {
|
||||
f *os.File
|
||||
|
||||
@@ -270,12 +270,27 @@ func TestStaticFS(t *testing.T) {
|
||||
require.Equal(t, []string{allowedFn, deniedFn}, files)
|
||||
})
|
||||
|
||||
t.Run("staticfs filters underelying fs's files", func(t *testing.T) {
|
||||
t.Run("staticfs filters underlying fs's files", func(t *testing.T) {
|
||||
files, err := staticFS.Files()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, []string{allowedFn}, files)
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("FSRemover interface implementation verification", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
lfs := NewLocalFS(tmpDir)
|
||||
var localFSInterface FS = lfs
|
||||
_, isRemover := localFSInterface.(FSRemover)
|
||||
require.True(t, isRemover)
|
||||
|
||||
sfs, err := NewStaticFS(localFS)
|
||||
require.NoError(t, err)
|
||||
var staticFSInterface FS = sfs
|
||||
_, isRemover = staticFSInterface.(FSRemover)
|
||||
require.True(t, isRemover)
|
||||
})
|
||||
}
|
||||
|
||||
// TestFSTwoDotsInFileName ensures that LocalFS and StaticFS allow two dots in file names.
|
||||
|
||||
@@ -5,6 +5,8 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"testing"
|
||||
|
||||
@@ -422,3 +424,100 @@ func createPlugin(t *testing.T, pluginID string, class plugins.Class, managed, b
|
||||
func testCompatOpts() plugins.AddOpts {
|
||||
return plugins.NewAddOpts("10.0.0", runtime.GOOS, runtime.GOARCH, "")
|
||||
}
|
||||
|
||||
func TestPluginInstaller_Removal(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
t.Run("LocalFS plugin removal succeeds via installer.Remove", func(t *testing.T) {
|
||||
pluginDir := filepath.Join(tmpDir, "localfs-plugin")
|
||||
err := os.MkdirAll(pluginDir, 0750)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginJSON := `{
|
||||
"id": "localfs-plugin",
|
||||
"name": "LocalFS Plugin",
|
||||
"type": "datasource",
|
||||
"info": {
|
||||
"version": "1.0.0"
|
||||
}
|
||||
}`
|
||||
err = os.WriteFile(filepath.Join(pluginDir, "plugin.json"), []byte(pluginJSON), 0644)
|
||||
require.NoError(t, err)
|
||||
|
||||
localFS := plugins.NewLocalFS(pluginDir)
|
||||
pluginV1 := createPlugin(t, "localfs-plugin", plugins.ClassExternal, true, true, func(plugin *plugins.Plugin) {
|
||||
plugin.Info.Version = "1.0.0"
|
||||
plugin.FS = localFS
|
||||
})
|
||||
|
||||
registry := &fakes.FakePluginRegistry{
|
||||
Store: map[string]*plugins.Plugin{
|
||||
"localfs-plugin": pluginV1,
|
||||
},
|
||||
}
|
||||
|
||||
loader := &fakes.FakeLoader{
|
||||
UnloadFunc: func(_ context.Context, p *plugins.Plugin) (*plugins.Plugin, error) {
|
||||
return p, nil
|
||||
},
|
||||
}
|
||||
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.NoError(t, err)
|
||||
|
||||
inst := New(&config.PluginManagementCfg{}, registry, loader, &fakes.FakePluginRepo{}, &fakes.FakePluginStorage{}, storage.SimpleDirNameGeneratorFunc, &fakes.FakeAuthService{})
|
||||
err = inst.Remove(context.Background(), "localfs-plugin", "1.0.0")
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.True(t, os.IsNotExist(err))
|
||||
})
|
||||
|
||||
t.Run("StaticFS plugin removal is skipped via installer.Remove", func(t *testing.T) {
|
||||
pluginDir := filepath.Join(tmpDir, "staticfs-plugin")
|
||||
err := os.MkdirAll(pluginDir, 0750)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginJSON := `{
|
||||
"id": "staticfs-plugin",
|
||||
"name": "StaticFS Plugin",
|
||||
"type": "datasource",
|
||||
"info": {
|
||||
"version": "1.0.0"
|
||||
}
|
||||
}`
|
||||
err = os.WriteFile(filepath.Join(pluginDir, "plugin.json"), []byte(pluginJSON), 0644)
|
||||
require.NoError(t, err)
|
||||
|
||||
localFS := plugins.NewLocalFS(pluginDir)
|
||||
staticFS, err := plugins.NewStaticFS(localFS)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginV1 := createPlugin(t, "staticfs-plugin", plugins.ClassExternal, true, true, func(plugin *plugins.Plugin) {
|
||||
plugin.Info.Version = "1.0.0"
|
||||
plugin.FS = staticFS
|
||||
})
|
||||
|
||||
registry := &fakes.FakePluginRegistry{
|
||||
Store: map[string]*plugins.Plugin{
|
||||
"staticfs-plugin": pluginV1,
|
||||
},
|
||||
}
|
||||
|
||||
loader := &fakes.FakeLoader{
|
||||
UnloadFunc: func(_ context.Context, p *plugins.Plugin) (*plugins.Plugin, error) {
|
||||
return p, nil
|
||||
},
|
||||
}
|
||||
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.NoError(t, err)
|
||||
|
||||
inst := New(&config.PluginManagementCfg{}, registry, loader, &fakes.FakePluginRepo{}, &fakes.FakePluginStorage{}, storage.SimpleDirNameGeneratorFunc, &fakes.FakeAuthService{})
|
||||
err = inst.Remove(context.Background(), "staticfs-plugin", "1.0.0")
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.ErrorIs(t, err, os.ErrNotExist)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package sources
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
@@ -110,3 +111,32 @@ func TestDirAsLocalSources(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLocalSource(t *testing.T) {
|
||||
t.Run("NewLocalSource should always return plugins with StaticFS", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
pluginID := "test-plugin"
|
||||
pluginDir := filepath.Join(tmpDir, pluginID)
|
||||
|
||||
err := os.MkdirAll(pluginDir, 0750)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginJSON := `{
|
||||
"id": "test-plugin",
|
||||
"name": "Test Plugin",
|
||||
"type": "datasource",
|
||||
"info": {
|
||||
"version": "1.0.0"
|
||||
}
|
||||
}`
|
||||
err = os.WriteFile(filepath.Join(pluginDir, "plugin.json"), []byte(pluginJSON), 0644)
|
||||
require.NoError(t, err)
|
||||
|
||||
bundles, err := NewLocalSource(plugins.ClassExternal, []string{pluginDir}).Discover(t.Context())
|
||||
require.NoError(t, err)
|
||||
require.Len(t, bundles, 1, "Should discover exactly one plugin")
|
||||
require.Equal(t, pluginID, bundles[0].Primary.JSONData.ID)
|
||||
_, canRemove := bundles[0].Primary.FS.(plugins.FSRemover)
|
||||
require.True(t, canRemove)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -401,6 +401,7 @@ const (
|
||||
// Settings scope
|
||||
ScopeSettingsAll = "settings:*"
|
||||
ScopeSettingsSAML = "settings:auth.saml:*"
|
||||
ScopeSettingsSCIM = "settings:auth.scim:*"
|
||||
|
||||
// Team related actions
|
||||
ActionTeamsCreate = "teams:create"
|
||||
|
||||
@@ -273,6 +273,14 @@ var (
|
||||
Action: ActionSettingsWrite,
|
||||
Scope: ScopeSettingsOAuth("ldap"),
|
||||
},
|
||||
{
|
||||
Action: ActionSettingsRead,
|
||||
Scope: ScopeSettingsSCIM,
|
||||
},
|
||||
{
|
||||
Action: ActionSettingsWrite,
|
||||
Scope: ScopeSettingsSCIM,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/permissions"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/searchstore"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -28,16 +29,21 @@ var (
|
||||
)
|
||||
|
||||
type AuthService struct {
|
||||
db db.DB
|
||||
features featuremgmt.FeatureToggles
|
||||
dashSvc dashboards.DashboardService
|
||||
db db.DB
|
||||
features featuremgmt.FeatureToggles
|
||||
dashSvc dashboards.DashboardService
|
||||
searchDashboardsPageLimit int64
|
||||
}
|
||||
|
||||
func NewAuthService(db db.DB, features featuremgmt.FeatureToggles, dashSvc dashboards.DashboardService) *AuthService {
|
||||
func NewAuthService(db db.DB, features featuremgmt.FeatureToggles, dashSvc dashboards.DashboardService, cfg *setting.Cfg) *AuthService {
|
||||
section := cfg.Raw.Section("annotations")
|
||||
searchDashboardsPageLimit := section.Key("search_dashboards_page_limit").MustInt64(1000)
|
||||
|
||||
return &AuthService{
|
||||
db: db,
|
||||
features: features,
|
||||
dashSvc: dashSvc,
|
||||
db: db,
|
||||
features: features,
|
||||
dashSvc: dashSvc,
|
||||
searchDashboardsPageLimit: searchDashboardsPageLimit,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -137,7 +143,7 @@ func (authz *AuthService) dashboardsWithVisibleAnnotations(ctx context.Context,
|
||||
SignedInUser: query.SignedInUser,
|
||||
Page: query.Page,
|
||||
Type: filterType,
|
||||
Limit: 1000,
|
||||
Limit: authz.searchDashboardsPageLimit,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -56,7 +56,7 @@ func ProvideService(
|
||||
return &RepositoryImpl{
|
||||
db: db,
|
||||
features: features,
|
||||
authZ: accesscontrol.NewAuthService(db, features, dashSvc),
|
||||
authZ: accesscontrol.NewAuthService(db, features, dashSvc, cfg),
|
||||
reader: read,
|
||||
writer: write,
|
||||
}
|
||||
|
||||
@@ -688,15 +688,15 @@ var validConfigWithAutogen = `{
|
||||
"receiver": "some email",
|
||||
"object_matchers": [["__grafana_autogenerated__", "=", "true"]],
|
||||
"routes": [{
|
||||
"receiver": "some email",
|
||||
"group_by": ["grafana_folder", "alertname"],
|
||||
"object_matchers": [["__grafana_receiver__", "=", "some email"]],
|
||||
"continue": false
|
||||
},{
|
||||
"receiver": "other email",
|
||||
"group_by": ["grafana_folder", "alertname"],
|
||||
"object_matchers": [["__grafana_receiver__", "=", "other email"]],
|
||||
"continue": false
|
||||
},{
|
||||
"receiver": "some email",
|
||||
"group_by": ["grafana_folder", "alertname"],
|
||||
"object_matchers": [["__grafana_receiver__", "=", "some email"]],
|
||||
"continue": false
|
||||
}]
|
||||
},{
|
||||
"receiver": "other email",
|
||||
|
||||
@@ -190,8 +190,12 @@ func (s *NotificationSettings) Fingerprint() data.Fingerprint {
|
||||
for _, interval := range s.MuteTimeIntervals {
|
||||
writeString(interval)
|
||||
}
|
||||
// Add a separator between the time intervals to avoid collisions
|
||||
// when all settings are the same including interval names except for the interval type (mute vs active).
|
||||
_, _ = h.Write([]byte{255})
|
||||
for _, interval := range s.ActiveTimeIntervals {
|
||||
writeString(interval)
|
||||
}
|
||||
|
||||
return data.Fingerprint(h.Sum64())
|
||||
}
|
||||
|
||||
@@ -113,6 +113,8 @@ func TestValidate(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestNotificationSettingsLabels(t *testing.T) {
|
||||
timeInterval := "time-interval-1"
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
notificationSettings NotificationSettings
|
||||
@@ -135,7 +137,7 @@ func TestNotificationSettingsLabels(t *testing.T) {
|
||||
labels: data.Labels{
|
||||
AutogeneratedRouteLabel: "true",
|
||||
AutogeneratedRouteReceiverNameLabel: "receiver name",
|
||||
AutogeneratedRouteSettingsHashLabel: "6027cdeaff62ba3f",
|
||||
AutogeneratedRouteSettingsHashLabel: "c65d254ff4c279f2",
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -151,7 +153,7 @@ func TestNotificationSettingsLabels(t *testing.T) {
|
||||
labels: data.Labels{
|
||||
AutogeneratedRouteLabel: "true",
|
||||
AutogeneratedRouteReceiverNameLabel: "receiver name",
|
||||
AutogeneratedRouteSettingsHashLabel: "47164c92f2986a35",
|
||||
AutogeneratedRouteSettingsHashLabel: "634e52b238fc78f0",
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -168,7 +170,25 @@ func TestNotificationSettingsLabels(t *testing.T) {
|
||||
labels: data.Labels{
|
||||
AutogeneratedRouteLabel: "true",
|
||||
AutogeneratedRouteReceiverNameLabel: "receiver name",
|
||||
AutogeneratedRouteSettingsHashLabel: "a173df6210e43af0",
|
||||
AutogeneratedRouteSettingsHashLabel: "9ac606ba0f6bcfb5",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "default notification settings with active time interval",
|
||||
notificationSettings: CopyNotificationSettings(NewDefaultNotificationSettings("receiver name"), NSMuts.WithActiveTimeIntervals(timeInterval)),
|
||||
labels: data.Labels{
|
||||
AutogeneratedRouteLabel: "true",
|
||||
AutogeneratedRouteReceiverNameLabel: "receiver name",
|
||||
AutogeneratedRouteSettingsHashLabel: "8304d9c06fda36e2",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "default notification settings with mute time interval",
|
||||
notificationSettings: CopyNotificationSettings(NewDefaultNotificationSettings("receiver name"), NSMuts.WithMuteTimeIntervals(timeInterval)),
|
||||
labels: data.Labels{
|
||||
AutogeneratedRouteLabel: "true",
|
||||
AutogeneratedRouteReceiverNameLabel: "receiver name",
|
||||
AutogeneratedRouteSettingsHashLabel: "171cfd2d4e0810fa",
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -181,6 +201,27 @@ func TestNotificationSettingsLabels(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestNotificationSettings_TimeIntervals(t *testing.T) {
|
||||
// Create notification settings with default settings and usign the same
|
||||
// time interval, but in one case as a mute time interval and in another case
|
||||
// as an active time interval. They should produce different hashes.
|
||||
|
||||
receiver := "receiver name"
|
||||
timeInterval := "time interval name"
|
||||
|
||||
muteSettings := NotificationSettings{
|
||||
Receiver: receiver,
|
||||
MuteTimeIntervals: []string{timeInterval},
|
||||
}
|
||||
|
||||
activeSettings := NotificationSettings{
|
||||
Receiver: receiver,
|
||||
ActiveTimeIntervals: []string{timeInterval},
|
||||
}
|
||||
|
||||
require.NotEqual(t, activeSettings.Fingerprint(), muteSettings.Fingerprint())
|
||||
}
|
||||
|
||||
func TestNormalizedGroupBy(t *testing.T) {
|
||||
validNotificationSettings := NotificationSettingsGen()
|
||||
|
||||
|
||||
@@ -86,9 +86,9 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Receiver: "default",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteLabel, "true"),
|
||||
Routes: []*definitions.Route{
|
||||
basicContactRoute("receiver1"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver2"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver1"),
|
||||
},
|
||||
}),
|
||||
},
|
||||
@@ -100,9 +100,9 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Receiver: "default",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteLabel, "true"),
|
||||
Routes: []*definitions.Route{
|
||||
basicContactRoute("receiver1"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver2"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver1"),
|
||||
},
|
||||
}),
|
||||
},
|
||||
@@ -130,42 +130,42 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Receiver: "default",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteLabel, "true"),
|
||||
Routes: []*definitions.Route{
|
||||
withChildRoutes(basicContactRoute("receiver5"), &definitions.Route{
|
||||
Receiver: "receiver5",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "030d6474aec0b553"),
|
||||
MuteTimeIntervals: []string{"maintenance"},
|
||||
}, &definitions.Route{
|
||||
Receiver: "receiver5",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "cd6cd2089632453c"),
|
||||
ActiveTimeIntervals: []string{"active"},
|
||||
}),
|
||||
withChildRoutes(basicContactRoute("receiver1"), &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "dde34b8127e68f31"),
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}, &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "f134b8faf7db083c"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "02466789dc88da23"),
|
||||
GroupByStr: []string{models.FolderTitleLabel, model.AlertNameLabel, "custom"},
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
GroupWait: util.Pointer(model.Duration(2 * time.Minute)),
|
||||
RepeatInterval: util.Pointer(model.Duration(3 * time.Minute)),
|
||||
MuteTimeIntervals: []string{"maintenance"},
|
||||
ActiveTimeIntervals: []string{"active"},
|
||||
}, &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "efc87d76ccc550bc"),
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}),
|
||||
withChildRoutes(basicContactRoute("receiver2"), &definitions.Route{
|
||||
Receiver: "receiver2",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "27e1d1717c9ef621"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "63ad04d6c21c3aec"),
|
||||
GroupWait: util.Pointer(model.Duration(2 * time.Minute)),
|
||||
}),
|
||||
withChildRoutes(basicContactRoute("receiver5"), &definitions.Route{
|
||||
Receiver: "receiver5",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "8cd5f9adeac58123"),
|
||||
ActiveTimeIntervals: []string{"active"},
|
||||
}, &definitions.Route{
|
||||
Receiver: "receiver5",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "f0770544f1741cf6"),
|
||||
MuteTimeIntervals: []string{"maintenance"},
|
||||
}),
|
||||
withChildRoutes(basicContactRoute("receiver4"), &definitions.Route{
|
||||
Receiver: "receiver4",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "b3a2fa5e615dcc7e"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "9bbbec5f72627ae5"),
|
||||
GroupByStr: []string{models.FolderTitleLabel, model.AlertNameLabel, "custom"},
|
||||
}),
|
||||
withChildRoutes(basicContactRoute("receiver3"), &definitions.Route{
|
||||
Receiver: "receiver3",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "9e282ef0193d830a"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "fbcacbfae385a901"),
|
||||
RepeatInterval: util.Pointer(model.Duration(3 * time.Minute)),
|
||||
}),
|
||||
},
|
||||
@@ -183,7 +183,7 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Routes: []*definitions.Route{
|
||||
withChildRoutes(basicContactRoute("receiver1"), &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "dde34b8127e68f31"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "efc87d76ccc550bc"),
|
||||
GroupByStr: nil,
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}),
|
||||
@@ -203,13 +203,13 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Routes: []*definitions.Route{
|
||||
withChildRoutes(basicContactRoute("receiver1"), &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "dde34b8127e68f31"),
|
||||
GroupByStr: nil,
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "828092ed6f427a00"), // Different hash.
|
||||
GroupByStr: []string{models.FolderTitleLabel, model.AlertNameLabel},
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}, &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "e1f3a275a8918385"), // Different hash.
|
||||
GroupByStr: []string{models.FolderTitleLabel, model.AlertNameLabel},
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "efc87d76ccc550bc"),
|
||||
GroupByStr: nil,
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}),
|
||||
},
|
||||
@@ -229,7 +229,7 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Routes: []*definitions.Route{
|
||||
withChildRoutes(basicContactRoute("receiver1"), &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "e1f3a275a8918385"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "828092ed6f427a00"),
|
||||
GroupByStr: []string{models.FolderTitleLabel, model.AlertNameLabel},
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}),
|
||||
@@ -249,9 +249,9 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Receiver: "default",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteLabel, "true"),
|
||||
Routes: []*definitions.Route{
|
||||
basicContactRoute("receiver1"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver2"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver1"),
|
||||
},
|
||||
}),
|
||||
},
|
||||
|
||||
+7
-5
@@ -472,14 +472,16 @@ function handleRedirectTo(): void {
|
||||
}
|
||||
|
||||
window.sessionStorage.removeItem(RedirectToUrlKey);
|
||||
const decodedRedirectTo = decodeURIComponent(redirectTo);
|
||||
let decodedRedirectTo = decodeURIComponent(redirectTo);
|
||||
if (decodedRedirectTo.startsWith('/goto/')) {
|
||||
// In this case there should be a request to the backend
|
||||
window.location.replace(decodedRedirectTo);
|
||||
} else {
|
||||
const stripped = locationUtil.stripBaseFromUrl(decodedRedirectTo);
|
||||
locationService.replace(stripped);
|
||||
const urlToRedirectTo = locationUtil.assureBaseUrl(decodedRedirectTo);
|
||||
window.location.replace(urlToRedirectTo);
|
||||
return;
|
||||
}
|
||||
// Ensure that the appsuburl is stripped from the redirect to in case of a frontend redirect
|
||||
const stripped = locationUtil.stripBaseFromUrl(decodedRedirectTo);
|
||||
locationService.replace(stripped);
|
||||
}
|
||||
|
||||
export default new GrafanaApp();
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { useForm } from 'react-hook-form';
|
||||
import { connect, ConnectedProps } from 'react-redux';
|
||||
|
||||
import { NavModelItem } from '@grafana/data';
|
||||
import { Trans, t } from '@grafana/i18n';
|
||||
@@ -11,8 +10,7 @@ import { contextSrv } from 'app/core/core';
|
||||
import { GrafanaRouteComponentProps } from 'app/core/navigation/types';
|
||||
import { AccessControlAction } from 'app/types/accessControl';
|
||||
import { AppNotificationSeverity } from 'app/types/appNotifications';
|
||||
import { LdapConnectionInfo, LdapUser, SyncInfo, LdapError } from 'app/types/ldap';
|
||||
import { StoreState } from 'app/types/store';
|
||||
import { useDispatch, useSelector } from 'app/types/store';
|
||||
|
||||
import {
|
||||
loadLdapState,
|
||||
@@ -26,13 +24,7 @@ import { LdapConnectionStatus } from './LdapConnectionStatus';
|
||||
import { LdapSyncInfo } from './LdapSyncInfo';
|
||||
import { LdapUserInfo } from './LdapUserInfo';
|
||||
|
||||
interface OwnProps extends GrafanaRouteComponentProps<{}, { username?: string }> {
|
||||
ldapConnectionInfo: LdapConnectionInfo;
|
||||
ldapUser?: LdapUser;
|
||||
ldapSyncInfo?: SyncInfo;
|
||||
ldapError?: LdapError;
|
||||
userError?: LdapError;
|
||||
}
|
||||
interface Props extends GrafanaRouteComponentProps<{}, { username?: string }> {}
|
||||
|
||||
interface FormModel {
|
||||
username: string;
|
||||
@@ -45,36 +37,31 @@ const pageNav: NavModelItem = {
|
||||
id: 'LDAP',
|
||||
};
|
||||
|
||||
export const LdapPage = ({
|
||||
clearUserMappingInfo,
|
||||
queryParams,
|
||||
loadLdapState,
|
||||
loadLdapSyncStatus,
|
||||
loadUserMapping,
|
||||
clearUserError,
|
||||
ldapUser,
|
||||
userError,
|
||||
ldapError,
|
||||
ldapSyncInfo,
|
||||
ldapConnectionInfo,
|
||||
}: Props) => {
|
||||
export const LdapPage = ({ queryParams }: Props) => {
|
||||
const dispatch = useDispatch();
|
||||
|
||||
const ldapConnectionInfo = useSelector((state) => state.ldap.connectionInfo);
|
||||
const ldapUser = useSelector((state) => state.ldap.user);
|
||||
const ldapSyncInfo = useSelector((state) => state.ldap.syncInfo);
|
||||
const userError = useSelector((state) => state.ldap.userError);
|
||||
const ldapError = useSelector((state) => state.ldap.ldapError);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const { register, handleSubmit } = useForm<FormModel>();
|
||||
|
||||
const fetchUserMapping = useCallback(
|
||||
async (username: string) => {
|
||||
return loadUserMapping(username);
|
||||
return dispatch(loadUserMapping(username));
|
||||
},
|
||||
[loadUserMapping]
|
||||
[dispatch]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchLDAPStatus = async () => {
|
||||
return Promise.all([loadLdapState(), loadLdapSyncStatus()]);
|
||||
return Promise.all([dispatch(loadLdapState()), dispatch(loadLdapSyncStatus())]);
|
||||
};
|
||||
|
||||
async function init() {
|
||||
await clearUserMappingInfo();
|
||||
await dispatch(clearUserMappingInfo());
|
||||
await fetchLDAPStatus();
|
||||
|
||||
if (queryParams.username) {
|
||||
@@ -85,7 +72,7 @@ export const LdapPage = ({
|
||||
}
|
||||
|
||||
init();
|
||||
}, [clearUserMappingInfo, fetchUserMapping, loadLdapState, loadLdapSyncStatus, queryParams]);
|
||||
}, [dispatch, fetchUserMapping, queryParams]);
|
||||
|
||||
const search = ({ username }: FormModel) => {
|
||||
if (username) {
|
||||
@@ -94,7 +81,7 @@ export const LdapPage = ({
|
||||
};
|
||||
|
||||
const onClearUserError = () => {
|
||||
clearUserError();
|
||||
dispatch(clearUserError());
|
||||
};
|
||||
|
||||
const canReadLDAPUser = contextSrv.hasPermission(AccessControlAction.LDAPUsersRead);
|
||||
@@ -147,23 +134,4 @@ export const LdapPage = ({
|
||||
);
|
||||
};
|
||||
|
||||
const mapStateToProps = (state: StoreState) => ({
|
||||
ldapConnectionInfo: state.ldap.connectionInfo,
|
||||
ldapUser: state.ldap.user,
|
||||
ldapSyncInfo: state.ldap.syncInfo,
|
||||
userError: state.ldap.userError,
|
||||
ldapError: state.ldap.ldapError,
|
||||
});
|
||||
|
||||
const mapDispatchToProps = {
|
||||
loadLdapState,
|
||||
loadLdapSyncStatus,
|
||||
loadUserMapping,
|
||||
clearUserError,
|
||||
clearUserMappingInfo,
|
||||
};
|
||||
|
||||
const connector = connect(mapStateToProps, mapDispatchToProps);
|
||||
type Props = OwnProps & ConnectedProps<typeof connector>;
|
||||
|
||||
export default connector(LdapPage);
|
||||
export default LdapPage;
|
||||
|
||||
@@ -31,6 +31,7 @@ import { LdapPayload, MapKeyCertConfigured } from 'app/types/ldap';
|
||||
import { StoreState } from 'app/types/store';
|
||||
|
||||
import { LdapDrawerComponent } from './LdapDrawer';
|
||||
import { LdapTestDrawer } from './LdapTestDrawer';
|
||||
|
||||
const appEvents = getAppEvents();
|
||||
|
||||
@@ -99,6 +100,8 @@ const emptySettings: LdapPayload = {
|
||||
export const LdapSettingsPage = () => {
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [isDrawerOpen, setIsDrawerOpen] = useState(false);
|
||||
const [isTestDrawerOpen, setIsTestDrawerOpen] = useState(false);
|
||||
const [usernameParam, setUsernameParam] = useState<string | null>(null);
|
||||
|
||||
const [isBindPasswordConfigured, setBindPasswordConfigured] = useState(false);
|
||||
const [mapKeyCertConfigured, setMapKeyCertConfigured] = useState<MapKeyCertConfigured>({
|
||||
@@ -122,6 +125,10 @@ export const LdapSettingsPage = () => {
|
||||
|
||||
useEffect(() => {
|
||||
async function init() {
|
||||
const urlParams = new URLSearchParams(window.location.search);
|
||||
const username = urlParams.get('username');
|
||||
setUsernameParam(username);
|
||||
|
||||
const payload = await getSettings();
|
||||
let serverConfig = emptySettings.settings.config.servers[0];
|
||||
if (payload.settings.config.servers?.length > 0) {
|
||||
@@ -135,6 +142,10 @@ export const LdapSettingsPage = () => {
|
||||
|
||||
reset(payload);
|
||||
setIsLoading(false);
|
||||
|
||||
if (username) {
|
||||
setIsTestDrawerOpen(true);
|
||||
}
|
||||
}
|
||||
init();
|
||||
}, [reset]); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
@@ -416,6 +427,9 @@ export const LdapSettingsPage = () => {
|
||||
<Button variant="secondary" onClick={handleSubmit(saveForm)}>
|
||||
<Trans i18nKey="ldap-settings-page.buttons-section.save-button">Save</Trans>
|
||||
</Button>
|
||||
<Button variant="secondary" onClick={() => setIsTestDrawerOpen(true)}>
|
||||
<Trans i18nKey="ldap-settings-page.buttons-section.test-button">Test</Trans>
|
||||
</Button>
|
||||
<LinkButton href="/admin/authentication" variant="secondary">
|
||||
<Trans i18nKey="ldap-settings-page.buttons-section.discard-button">Discard</Trans>
|
||||
</LinkButton>
|
||||
@@ -455,6 +469,9 @@ export const LdapSettingsPage = () => {
|
||||
/>
|
||||
)}
|
||||
</form>
|
||||
{isTestDrawerOpen && (
|
||||
<LdapTestDrawer onClose={() => setIsTestDrawerOpen(false)} username={usernameParam || undefined} />
|
||||
)}
|
||||
</FormProvider>
|
||||
</Page.Contents>
|
||||
</Page>
|
||||
|
||||
@@ -0,0 +1,138 @@
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { useForm } from 'react-hook-form';
|
||||
|
||||
import { Trans, t } from '@grafana/i18n';
|
||||
import { featureEnabled } from '@grafana/runtime';
|
||||
import { Alert, Button, Drawer, Field, Input, LoadingPlaceholder, Stack, Text } from '@grafana/ui';
|
||||
import { contextSrv } from 'app/core/core';
|
||||
import { AccessControlAction } from 'app/types/accessControl';
|
||||
import { AppNotificationSeverity } from 'app/types/appNotifications';
|
||||
import { useDispatch, useSelector } from 'app/types/store';
|
||||
|
||||
import {
|
||||
loadLdapState,
|
||||
loadLdapSyncStatus,
|
||||
loadUserMapping,
|
||||
clearUserError,
|
||||
clearUserMappingInfo,
|
||||
} from '../state/actions';
|
||||
|
||||
import { LdapConnectionStatus } from './LdapConnectionStatus';
|
||||
import { LdapSyncInfo } from './LdapSyncInfo';
|
||||
import { LdapUserInfo } from './LdapUserInfo';
|
||||
|
||||
interface Props {
|
||||
onClose: () => void;
|
||||
username?: string;
|
||||
}
|
||||
|
||||
interface FormModel {
|
||||
username: string;
|
||||
}
|
||||
|
||||
export const LdapTestDrawer = ({ onClose, username }: Props) => {
|
||||
const dispatch = useDispatch();
|
||||
|
||||
const ldapConnectionInfo = useSelector((state) => state.ldap.connectionInfo);
|
||||
const ldapUser = useSelector((state) => state.ldap.user);
|
||||
const ldapSyncInfo = useSelector((state) => state.ldap.syncInfo);
|
||||
const userError = useSelector((state) => state.ldap.userError);
|
||||
const ldapError = useSelector((state) => state.ldap.ldapError);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const { register, handleSubmit } = useForm<FormModel>();
|
||||
|
||||
const fetchUserMapping = useCallback(
|
||||
async (username: string) => {
|
||||
return dispatch(loadUserMapping(username));
|
||||
},
|
||||
[dispatch]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchLDAPStatus = async () => {
|
||||
return Promise.all([dispatch(loadLdapState()), dispatch(loadLdapSyncStatus())]);
|
||||
};
|
||||
|
||||
async function init() {
|
||||
dispatch(clearUserMappingInfo());
|
||||
await fetchLDAPStatus();
|
||||
|
||||
if (username) {
|
||||
await fetchUserMapping(username);
|
||||
}
|
||||
|
||||
setIsLoading(false);
|
||||
}
|
||||
|
||||
init();
|
||||
}, [dispatch, fetchUserMapping, username]);
|
||||
|
||||
const search = (data: FormModel, event?: React.BaseSyntheticEvent) => {
|
||||
event?.preventDefault();
|
||||
event?.stopPropagation();
|
||||
if (data.username) {
|
||||
fetchUserMapping(data.username);
|
||||
}
|
||||
};
|
||||
|
||||
const onClearUserError = () => {
|
||||
dispatch(clearUserError());
|
||||
};
|
||||
|
||||
const canReadLDAPUser = contextSrv.hasPermission(AccessControlAction.LDAPUsersRead);
|
||||
return (
|
||||
<Drawer
|
||||
title={t('admin.ldap.debug-title', 'LDAP Diagnostics')}
|
||||
subtitle={t('admin.ldap.debug-subtitle', 'Verify your LDAP and user mapping configuration.')}
|
||||
onClose={onClose}
|
||||
>
|
||||
{isLoading ? (
|
||||
<LoadingPlaceholder text={t('admin.ldap.text-loading-ldap-status', 'Loading LDAP status...')} />
|
||||
) : (
|
||||
<Stack direction="column" gap={4}>
|
||||
{ldapError && ldapError.title && (
|
||||
<Alert title={ldapError.title} severity={AppNotificationSeverity.Error}>
|
||||
{ldapError.body}
|
||||
</Alert>
|
||||
)}
|
||||
|
||||
<LdapConnectionStatus ldapConnectionInfo={ldapConnectionInfo} />
|
||||
|
||||
{featureEnabled('ldapsync') && ldapSyncInfo && <LdapSyncInfo ldapSyncInfo={ldapSyncInfo} />}
|
||||
|
||||
{canReadLDAPUser && (
|
||||
<section>
|
||||
<Stack direction="column" gap={2}>
|
||||
<Text element="h3">
|
||||
<Trans i18nKey="admin.ldap.test-mapping-heading">Test user mapping</Trans>
|
||||
</Text>
|
||||
<form onSubmit={handleSubmit(search)}>
|
||||
<Field noMargin label={t('admin.ldap-page.label-username', 'Username')}>
|
||||
<Stack>
|
||||
<Input
|
||||
{...register('username', { required: true })}
|
||||
width={34}
|
||||
id="username"
|
||||
type="text"
|
||||
defaultValue={username}
|
||||
/>
|
||||
<Button variant="secondary" type="submit">
|
||||
<Trans i18nKey="admin.ldap.test-mapping-run-button">Run</Trans>
|
||||
</Button>
|
||||
</Stack>
|
||||
</Field>
|
||||
</form>
|
||||
{userError && userError.title && (
|
||||
<Alert title={userError.title} severity={AppNotificationSeverity.Error} onRemove={onClearUserError}>
|
||||
{userError.body}
|
||||
</Alert>
|
||||
)}
|
||||
{ldapUser && <LdapUserInfo ldapUser={ldapUser} />}
|
||||
</Stack>
|
||||
</section>
|
||||
)}
|
||||
</Stack>
|
||||
)}
|
||||
</Drawer>
|
||||
);
|
||||
};
|
||||
@@ -147,10 +147,10 @@ function DashboardControlsRenderer({ model }: SceneComponentProps<DashboardContr
|
||||
{editPanel && <PanelEditControls panelEditor={editPanel} />}
|
||||
</Stack>
|
||||
{!hideTimeControls && (
|
||||
<Stack justifyContent="flex-end">
|
||||
<div className={cx(styles.timeControls, editPanel && styles.timeControlsWrap)}>
|
||||
<timePicker.Component model={timePicker} />
|
||||
<refreshPicker.Component model={refreshPicker} />
|
||||
</Stack>
|
||||
</div>
|
||||
)}
|
||||
<Stack>
|
||||
<DropdownVariableControls dashboard={dashboard} />
|
||||
@@ -191,6 +191,7 @@ function getStyles(theme: GrafanaTheme2) {
|
||||
},
|
||||
}),
|
||||
controlsPanelEdit: css({
|
||||
flexWrap: 'wrap-reverse',
|
||||
// In panel edit we do not need any right padding as the splitter is providing it
|
||||
paddingRight: 0,
|
||||
}),
|
||||
@@ -198,5 +199,14 @@ function getStyles(theme: GrafanaTheme2) {
|
||||
background: 'unset',
|
||||
position: 'unset',
|
||||
}),
|
||||
timeControls: css({
|
||||
display: 'flex',
|
||||
justifyContent: 'flex-end',
|
||||
gap: theme.spacing(1),
|
||||
}),
|
||||
timeControlsWrap: css({
|
||||
flexWrap: 'wrap',
|
||||
marginLeft: 'auto',
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user