Compare commits


2 Commits

Author SHA1 Message Date
Gonzalo Trigueros
9256705dd4 provisioning: refactor hierarchical errors in folder management. 2025-12-18 10:36:04 +01:00
Roberto Jimenez Sanchez
8c869c7bb4 Implement hierarchical error handling for folder creation failures
This commit implements hierarchical error handling to improve sync robustness
when folder creation fails. Instead of failing the entire sync, the system now:

1. Tracks failed folder creations and automatically skips nested resources
2. Records skipped resources with FileActionIgnored (these don't count toward error limits)
3. Allows other folder hierarchies to continue processing
4. Prevents folder deletion when child resource deletions fail

Key Changes:

- Add PathCreationError type to track which folder path failed
- Modify progress recorder to automatically detect and track failures via Record()
- Add IsNestedUnderFailedCreation() and HasFailedDeletionsUnder() checks
- Update full and incremental sync to skip nested resources after folder failures
- Deletions proceed even if parent folder creation failed (resource may exist from previous sync)
- FileActionIgnored results don't count toward error limits

Example behavior improvement:
Before: /monitoring folder creation fails → all nested resources fail → other folders never processed
After: /monitoring folder creation fails → nested resources ignored → /applications folder succeeds

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2025-12-18 10:36:03 +01:00
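For readers skimming the diff below, here is a minimal sketch of the mechanism this commit message describes: a PathCreationError carries the folder path that failed, and the progress recorder detects it with errors.As inside Record() so that nested resources can later be skipped as FileActionIgnored. Only the Path and Err fields and the errors.As check are confirmed by the diff; the Error()/Unwrap() methods and the standalone trackFailedCreation helper are illustrative assumptions.

```go
package main

import (
	"errors"
	"fmt"
)

// PathCreationError records which folder path failed to be created.
type PathCreationError struct {
	Path string // the folder that could not be created, e.g. "monitoring/"
	Err  error  // the underlying error
}

func (e *PathCreationError) Error() string {
	return fmt.Sprintf("failed to create folder %q: %v", e.Path, e.Err)
}

func (e *PathCreationError) Unwrap() error { return e.Err }

// trackFailedCreation mirrors the detection added to Record(): if a result's
// error wraps a PathCreationError, remember the failed folder so resources
// nested under it can later be recorded as FileActionIgnored instead of
// counting toward the error limit.
func trackFailedCreation(failedCreations []string, resultErr error) []string {
	var pathErr *PathCreationError
	if errors.As(resultErr, &pathErr) {
		failedCreations = append(failedCreations, pathErr.Path)
	}
	return failedCreations
}

func main() {
	err := fmt.Errorf("apply change: %w", &PathCreationError{
		Path: "monitoring/",
		Err:  errors.New("permission denied"),
	})
	fmt.Println(trackFailedCreation(nil, err)) // [monitoring/]
}
```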
118 changed files with 2889 additions and 737 deletions

1
.github/CODEOWNERS vendored
View File

@@ -24,6 +24,7 @@
/NOTICE.md @torkelo
/README.md @grafana/docs-grafana
/ROADMAP.md @torkelo
/SECURITY.md @grafana/security-team
/SUPPORT.md @torkelo
/WORKFLOW.md @torkelo
/contribute/ @grafana/grafana-community-support

View File

@@ -82,6 +82,14 @@ inputs:
description: Docker registry of produced images
default: docker.io
required: false
ubuntu-base:
type: string
default: 'ubuntu:22.04'
required: false
alpine-base:
type: string
default: 'alpine:3.22'
required: false
outputs:
dist-dir:
description: Directory where artifacts are placed
@@ -126,11 +134,13 @@ runs:
UBUNTU_TAG_FORMAT: ${{ inputs.docker-tag-format-ubuntu }}
CHECKSUM: ${{ inputs.checksum }}
VERIFY: ${{ inputs.verify }}
ALPINE_BASE: ${{ inputs.alpine-base }}
UBUNTU_BASE: ${{ inputs.ubuntu-base }}
with:
verb: run
dagger-flags: --verbose=0
version: 0.18.8
args: go run -C ${GRAFANA_PATH} ./pkg/build/cmd artifacts --artifacts ${ARTIFACTS} --grafana-dir=${GRAFANA_PATH} --enterprise-dir=${ENTERPRISE_PATH} --version=${VERSION} --patches-repo=${PATCHES_REPO} --patches-ref=${PATCHES_REF} --patches-path=${PATCHES_PATH} --build-id=${BUILD_ID} --tag-format="${TAG_FORMAT}" --ubuntu-tag-format="${UBUNTU_TAG_FORMAT}" --org=${DOCKER_ORG} --registry=${DOCKER_REGISTRY} --checksum=${CHECKSUM} --verify=${VERIFY} > $OUTFILE
args: go run -C ${GRAFANA_PATH} ./pkg/build/cmd artifacts --artifacts ${ARTIFACTS} --grafana-dir=${GRAFANA_PATH} --alpine-base=${ALPINE_BASE} --ubuntu-base=${UBUNTU_BASE} --enterprise-dir=${ENTERPRISE_PATH} --version=${VERSION} --patches-repo=${PATCHES_REPO} --patches-ref=${PATCHES_REF} --patches-path=${PATCHES_PATH} --build-id=${BUILD_ID} --tag-format="${TAG_FORMAT}" --ubuntu-tag-format="${UBUNTU_TAG_FORMAT}" --org=${DOCKER_ORG} --registry=${DOCKER_REGISTRY} --checksum=${CHECKSUM} --verify=${VERIFY} > $OUTFILE
- id: output
shell: bash
env:

View File

@@ -1,13 +0,0 @@
diff --git a/dist/builder-manager/index.js b/dist/builder-manager/index.js
index 3d7f9b213dae1801bda62b31db31b9113e382ccd..212501c63d20146c29db63fb0f6300c6779eecb5 100644
--- a/dist/builder-manager/index.js
+++ b/dist/builder-manager/index.js
@@ -1970,7 +1970,7 @@ var pa = /^\/($|\?)/, G, C, xt = /* @__PURE__ */ o(async (e) => {
bundle: !0,
minify: !0,
sourcemap: !1,
- conditions: ["browser", "module", "default"],
+ conditions: ["@grafana-app/source", "browser", "module", "default"],
jsxFactory: "React.createElement",
jsxFragment: "React.Fragment",
jsx: "transform",

29
SECURITY.md Normal file
View File

@@ -0,0 +1,29 @@
# Reporting security issues
If you think you have found a security vulnerability, we have two routes for reporting security issues.
Important: Whichever route you choose, we ask you to not disclose the vulnerability before it has been fixed and announced, unless you received a response from the Grafana Labs security team that you can do so.
[Full guidance on reporting a security issue can be found here](https://grafana.com/legal/report-a-security-issue/).
This product is in scope for our Bug Bounty Program. To submit a vulnerability report, please visit [Grafana Labs Bug Bounty page](https://app.intigriti.com/programs/grafanalabs/grafanaossbbp/detail) and follow the instructions provided. Our security team will review your submission and get back to you as soon as possible.
---
For products and services outside the scope of our bug bounty program, or if you do not wish to receive a bounty, you can report issues directly to us via email at security@grafana.com. This address can be used for all of Grafana Labs open source and commercial products (including but not limited to Grafana, Grafana Cloud, Grafana Enterprise, and grafana.com).
Please encrypt your message to us; please use our PGP key. The key fingerprint is:
225E 6A9B BB15 A37E 95EB 6312 C66A 51CC B44C 27E0
The key is available from [keyserver.ubuntu.com](https://keyserver.ubuntu.com/pks/lookup?search=0x225E6A9BBB15A37E95EB6312C66A51CCB44C27E0&fingerprint=on&op=index).
Grafana Labs will send you a response indicating the next steps in handling your report. After the initial reply to your report, the security team will keep you informed of the progress towards a fix and full announcement, and may ask for additional information or guidance.
**Important:** We ask you to not disclose the vulnerability before it has been fixed and announced, unless you received a response from the Grafana Labs security team that you can do so.
## Security announcements
We will post a summary, remediation, and mitigation details for any patch containing security fixes on the Grafana blog. The security announcement blog posts will be tagged with the [security tag](https://grafana.com/tags/security/).
You can also track security announcements via the [RSS feed](https://grafana.com/tags/security/index.xml).

View File

@@ -18,7 +18,6 @@ const webpackOptions = {
},
resolve: {
extensions: ['.ts', '.js'],
conditionNames: ['@grafana-app/source', '...'],
},
};

View File

@@ -4246,6 +4246,9 @@
}
},
"public/app/plugins/panel/geomap/components/DebugOverlay.tsx": {
"@grafana/no-aria-label-selectors": {
"count": 1
},
"react-prefer-function-component/react-prefer-function-component": {
"count": 1
}

View File

@@ -40,9 +40,6 @@ const esModules = [
module.exports = {
verbose: false,
testEnvironment: 'jsdom',
testEnvironmentOptions: {
customExportConditions: ['@grafana-app/source', 'browser'],
},
transform: {
'^.+\\.(ts|tsx|js|jsx)$': [require.resolve('ts-jest')],
},

View File

@@ -26,10 +26,10 @@
"e2e:enterprise": "./e2e/start-and-run-suite enterprise",
"e2e:enterprise:dev": "./e2e/start-and-run-suite enterprise dev",
"e2e:enterprise:debug": "./e2e/start-and-run-suite enterprise debug",
"e2e:playwright": "NODE_OPTIONS='-C @grafana-app/source' yarn playwright test --grep-invert @cloud-plugins",
"e2e:playwright:cloud-plugins": "NODE_OPTIONS='-C @grafana-app/source' yarn playwright test --grep @cloud-plugins",
"e2e:playwright:storybook": "NODE_OPTIONS='-C @grafana-app/source' yarn playwright test -c playwright.storybook.config.ts",
"e2e:acceptance": "NODE_OPTIONS='-C @grafana-app/source' yarn playwright test --grep @acceptance",
"e2e:playwright": "yarn playwright test --grep-invert @cloud-plugins",
"e2e:playwright:cloud-plugins": "yarn playwright test --grep @cloud-plugins",
"e2e:playwright:storybook": "yarn playwright test -c playwright.storybook.config.ts",
"e2e:acceptance": "yarn playwright test --grep @acceptance",
"e2e:storybook": "PORT=9001 ./e2e/run-suite storybook true",
"e2e:plugin:build": "nx run-many -t build --projects='@test-plugins/*'",
"e2e:plugin:build:dev": "nx run-many -t dev --projects='@test-plugins/*' --maxParallel=100",
@@ -63,7 +63,7 @@
"storybook": "yarn workspace @grafana/ui storybook --ci",
"storybook:build": "yarn workspace @grafana/ui storybook:build",
"themes-schema": "typescript-json-schema ./tsconfig.json NewThemeOptions --include 'packages/grafana-data/src/themes/createTheme.ts' --out public/app/features/theme-playground/schema.generated.json",
"themes-generate": "yarn themes-schema && esbuild --target=es6 ./scripts/cli/generateSassVariableFiles.ts --bundle --conditions=@grafana-app/source --platform=node --tsconfig=./scripts/cli/tsconfig.json | node",
"themes-generate": "yarn themes-schema && esbuild --target=es6 ./scripts/cli/generateSassVariableFiles.ts --bundle --platform=node --tsconfig=./scripts/cli/tsconfig.json | node",
"themes:usage": "eslint . --ignore-pattern '*.test.ts*' --ignore-pattern '*.spec.ts*' --cache --plugin '@grafana' --rule '{ @grafana/theme-token-usage: \"error\" }'",
"typecheck": "tsc --noEmit && yarn run packages:typecheck",
"plugins:build-bundled": "echo 'bundled plugins are no longer supported'",
@@ -295,8 +295,8 @@
"@grafana/plugin-ui": "^0.11.1",
"@grafana/prometheus": "workspace:*",
"@grafana/runtime": "workspace:*",
"@grafana/scenes": "^6.51.0",
"@grafana/scenes-react": "^6.51.0",
"@grafana/scenes": "6.50.0",
"@grafana/scenes-react": "6.50.0",
"@grafana/schema": "workspace:*",
"@grafana/sql": "workspace:*",
"@grafana/ui": "workspace:*",
@@ -460,8 +460,7 @@
"tmp@npm:^0.0.33": "~0.2.1",
"js-yaml@npm:4.1.0": "^4.1.0",
"js-yaml@npm:=4.1.0": "^4.1.0",
"nodemailer": "7.0.7",
"@storybook/core@npm:8.6.2": "patch:@storybook/core@npm%3A8.6.2#~/.yarn/patches/@storybook-core-npm-8.6.2-8c752112c0.patch"
"nodemailer": "7.0.7"
},
"workspaces": {
"packages": [

View File

@@ -2,32 +2,13 @@
## Exporting code conventions
All the `@grafana` packages in this repo (except `@grafana/schema`) make use of `exports` in package.json to define entrypoints that Grafana core and Grafana plugins can access. Exports can also be used to restrict access to internal files in packages.
`@grafana/ui`, `@grafana/data` and `@grafana/runtime` makes use of `exports` in package.json to define three entrypoints that Grafana core and Grafana plugins can access. Before exposing anything in these packages please consider the table below to better understand the use case of each export.
Package authors are free to create as many exports as they like but should consider the following points:
1. Resolution of source code within this repo is handled by the [customCondition](https://www.typescriptlang.org/tsconfig/#customConditions) `@grafana-app/source`. This allows the frontend tooling in this repo to resolve to the source code preventing the need to build all the packages up front. When adding exports it is important to add an entry for the custom condition as the first item. All other entries should point to the built, bundled files. For example:
```json
"exports": {
".": {
"@grafana-app/source": "./src/index.ts",
"types": "./dist/types/index.d.ts",
"import": "./dist/esm/index.mjs",
"require": "./dist/cjs/index.cjs"
}
}
```
2. If you add exports to your package you must export the `package.json` file.
3. Before exposing anything in these packages please consider the table below to better understand the conventions we have put in place for most of the packages in this repository.
| Export Name | Import Path | Description | Available to Grafana | Available to plugins |
| ------------ | ---------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------- | -------------------- |
| `./` | `@grafana/ui` | The public API entrypoint. If the code is stable and you want to share it everywhere, this is the place to export it. | ✅ | ✅ |
| `./unstable` | `@grafana/ui/unstable` | The public API entrypoint for all experimental code. If you want to iterate and test code from Grafana and plugins, this is the place to export it. | ✅ | ✅ |
| `./internal` | `@grafana/ui/internal` | The private API entrypoint for internal code shared with Grafana. If you want to co-locate code in a package with it's public API but only want the Grafana application to access it, this is the place to export it. | ✅ | ❌ |
| Export Name | Import Path | Description | Available to Grafana | Available to plugins |
| ------------ | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------- | -------------------- |
| `./` | `@grafana/ui` | The public API entrypoint. If the code is stable and you want to share it everywhere, this is the place to export it. | ✅ | ✅ |
| `./unstable` | `@grafana/ui/unstable` | The public API entrypoint for all experimental code. If you want to iterate and test code from Grafana and plugins, this is the place to export it. | ✅ | ✅ |
| `./internal` | `@grafana/ui/internal` | The private API entrypoint for internal code shared with Grafana. If you need to import code in Grafana but don't want to expose it to plugins, this is the place to export it. | ✅ | ❌ |
## Versioning

View File

@@ -17,34 +17,32 @@
"url": "http://github.com/grafana/grafana.git",
"directory": "packages/grafana-alerting"
},
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"main": "src/index.ts",
"types": "src/index.ts",
"module": "src/index.ts",
"exports": {
"./package.json": "./package.json",
".": {
"@grafana-app/source": "./src/index.ts",
"types": "./dist/types/index.d.ts",
"import": "./dist/esm/index.mjs",
"require": "./dist/cjs/index.cjs"
},
"./unstable": {
"@grafana-app/source": "./src/unstable.ts",
"types": "./dist/types/unstable.d.ts",
"import": "./dist/esm/unstable.mjs",
"require": "./dist/cjs/unstable.cjs"
"import": "./src/index.ts",
"require": "./src/index.ts"
},
"./internal": {
"@grafana-app/source": "./src/internal.ts"
"import": "./src/internal.ts",
"require": "./src/internal.ts"
},
"./unstable": {
"import": "./src/unstable.ts",
"require": "./src/unstable.ts"
},
"./testing": {
"@grafana-app/source": "./src/testing.ts",
"types": "./dist/types/testing.d.ts",
"import": "./dist/esm/testing.mjs",
"require": "./dist/cjs/testing.cjs"
"import": "./src/testing.ts",
"require": "./src/testing.ts"
}
},
"publishConfig": {
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"access": "public"
},
"files": [
@@ -59,8 +57,8 @@
"clean": "rimraf ./dist ./compiled ./unstable ./testing ./package.tgz",
"typecheck": "tsc --emitDeclarationOnly false --noEmit",
"codegen": "rtk-query-codegen-openapi ./scripts/codegen.ts",
"prepack": "cp package.json package.json.bak && node ../../scripts/prepare-npm-package.js",
"postpack": "mv package.json.bak package.json",
"prepack": "cp package.json package.json.bak && ALIAS_PACKAGE_NAME=testing,unstable node ../../scripts/prepare-npm-package.js",
"postpack": "mv package.json.bak package.json && rimraf ./unstable ./testing",
"i18n-extract": "i18next-cli extract --sync-primary"
},
"devDependencies": {

View File

@@ -9,19 +9,19 @@ export default [
{
input: entryPoint,
plugins,
output: [cjsOutput(pkg, 'grafana-alerting'), esmOutput(pkg, 'grafana-alerting')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-alerting')],
treeshake: false,
},
{
input: 'src/unstable.ts',
plugins,
output: [cjsOutput(pkg, 'grafana-alerting'), esmOutput(pkg, 'grafana-alerting')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-alerting')],
treeshake: false,
},
{
input: 'src/testing.ts',
plugins,
output: [cjsOutput(pkg, 'grafana-alerting'), esmOutput(pkg, 'grafana-alerting')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-alerting')],
treeshake: false,
},
];

View File

@@ -15,121 +15,88 @@
"url": "https://github.com/grafana/grafana.git",
"directory": "packages/grafana-api-clients"
},
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"main": "src/index.ts",
"module": "src/index.ts",
"types": "src/index.ts",
"exports": {
"./package.json": "./package.json",
".": {
"@grafana-app/source": "./src/index.ts",
"types": "./dist/types/index.d.ts",
"import": "./dist/esm/index.mjs",
"require": "./dist/cjs/index.cjs"
"import": "./src/index.ts",
"require": "./src/index.ts"
},
"./rtkq": {
"@grafana-app/source": "./src/clients/rtkq/index.ts",
"types": "./dist/types/clients/rtkq/index.d.ts",
"import": "./dist/esm/clients/rtkq/index.mjs",
"require": "./dist/cjs/clients/rtkq/index.cjs"
"import": "./src/clients/rtkq/index.ts",
"require": "./src/clients/rtkq/index.ts"
},
"./rtkq/advisor/v0alpha1": {
"@grafana-app/source": "./src/clients/rtkq/advisor/v0alpha1/index.ts",
"types": "./dist/types/clients/rtkq/advisor/v0alpha1/index.d.ts",
"import": "./dist/esm/clients/rtkq/advisor/v0alpha1/index.mjs",
"require": "./dist/cjs/clients/rtkq/advisor/v0alpha1/index.cjs"
},
"./rtkq/collections/v1alpha1": {
"@grafana-app/source": "./src/clients/rtkq/collections/v1alpha1/index.ts",
"types": "./dist/types/clients/rtkq/collections/v1alpha1/index.d.ts",
"import": "./dist/esm/clients/rtkq/collections/v1alpha1/index.mjs",
"require": "./dist/cjs/clients/rtkq/collections/v1alpha1/index.cjs"
"import": "./src/clients/rtkq/advisor/v0alpha1/index.ts",
"require": "./src/clients/rtkq/advisor/v0alpha1/index.ts"
},
"./rtkq/correlations/v0alpha1": {
"@grafana-app/source": "./src/clients/rtkq/correlations/v0alpha1/index.ts",
"types": "./dist/types/clients/rtkq/correlations/v0alpha1/index.d.ts",
"import": "./dist/esm/clients/rtkq/correlations/v0alpha1/index.mjs",
"require": "./dist/cjs/clients/rtkq/correlations/v0alpha1/index.cjs"
"import": "./src/clients/rtkq/correlations/v0alpha1/index.ts",
"require": "./src/clients/rtkq/correlations/v0alpha1/index.ts"
},
"./rtkq/dashboard/v0alpha1": {
"@grafana-app/source": "./src/clients/rtkq/dashboard/v0alpha1/index.ts",
"types": "./dist/types/clients/rtkq/dashboard/v0alpha1/index.d.ts",
"import": "./dist/esm/clients/rtkq/dashboard/v0alpha1/index.mjs",
"require": "./dist/cjs/clients/rtkq/dashboard/v0alpha1/index.cjs"
"import": "./src/clients/rtkq/dashboard/v0alpha1/index.ts",
"require": "./src/clients/rtkq/dashboard/v0alpha1/index.ts"
},
"./rtkq/folder/v1beta1": {
"@grafana-app/source": "./src/clients/rtkq/folder/v1beta1/index.ts",
"types": "./dist/types/clients/rtkq/folder/v1beta1/index.d.ts",
"import": "./dist/esm/clients/rtkq/folder/v1beta1/index.mjs",
"require": "./dist/cjs/clients/rtkq/folder/v1beta1/index.cjs"
"import": "./src/clients/rtkq/folder/v1beta1/index.ts",
"require": "./src/clients/rtkq/folder/v1beta1/index.ts"
},
"./rtkq/iam/v0alpha1": {
"@grafana-app/source": "./src/clients/rtkq/iam/v0alpha1/index.ts",
"types": "./dist/types/clients/rtkq/iam/v0alpha1/index.d.ts",
"import": "./dist/esm/clients/rtkq/iam/v0alpha1/index.mjs",
"require": "./dist/cjs/clients/rtkq/iam/v0alpha1/index.cjs"
"import": "./src/clients/rtkq/iam/v0alpha1/index.ts",
"require": "./src/clients/rtkq/iam/v0alpha1/index.ts"
},
"./rtkq/legacy": {
"@grafana-app/source": "./src/clients/rtkq/legacy/index.ts",
"types": "./dist/types/clients/rtkq/legacy/index.d.ts",
"import": "./dist/esm/clients/rtkq/legacy/index.mjs",
"require": "./dist/cjs/clients/rtkq/legacy/index.cjs"
"import": "./src/clients/rtkq/legacy/index.ts",
"require": "./src/clients/rtkq/legacy/index.ts"
},
"./rtkq/legacy/migrate-to-cloud": {
"@grafana-app/source": "./src/clients/rtkq/migrate-to-cloud/index.ts",
"types": "./dist/types/clients/rtkq/migrate-to-cloud/index.d.ts",
"import": "./dist/esm/clients/rtkq/migrate-to-cloud/index.mjs",
"require": "./dist/cjs/clients/rtkq/migrate-to-cloud/index.cjs"
"import": "./src/clients/rtkq/migrate-to-cloud/index.ts",
"require": "./src/clients/rtkq/migrate-to-cloud/index.ts"
},
"./rtkq/legacy/preferences": {
"@grafana-app/source": "./src/clients/rtkq/preferences/user/index.ts",
"types": "./dist/types/clients/rtkq/preferences/user/index.d.ts",
"import": "./dist/esm/clients/rtkq/preferences/user/index.mjs",
"require": "./dist/cjs/clients/rtkq/preferences/user/index.cjs"
"import": "./src/clients/rtkq/preferences/user/index.ts",
"require": "./src/clients/rtkq/preferences/user/index.ts"
},
"./rtkq/legacy/user": {
"@grafana-app/source": "./src/clients/rtkq/user/index.ts",
"types": "./dist/types/clients/rtkq/user/index.d.ts",
"import": "./dist/esm/clients/rtkq/user/index.mjs",
"require": "./dist/cjs/clients/rtkq/user/index.cjs"
"import": "./src/clients/rtkq/user/index.ts",
"require": "./src/clients/rtkq/user/index.ts"
},
"./rtkq/playlist/v0alpha1": {
"@grafana-app/source": "./src/clients/rtkq/playlist/v0alpha1/index.ts",
"types": "./dist/types/clients/rtkq/playlist/v0alpha1/index.d.ts",
"import": "./dist/esm/clients/rtkq/playlist/v0alpha1/index.mjs",
"require": "./dist/cjs/clients/rtkq/playlist/v0alpha1/index.cjs"
"import": "./src/clients/rtkq/playlist/v0alpha1/index.ts",
"require": "./src/clients/rtkq/playlist/v0alpha1/index.ts"
},
"./rtkq/preferences/v1alpha1": {
"@grafana-app/source": "./src/clients/rtkq/preferences/v1alpha1/index.ts",
"types": "./dist/types/clients/rtkq/preferences/v1alpha1/index.d.ts",
"import": "./dist/esm/clients/rtkq/preferences/v1alpha1/index.mjs",
"require": "./dist/cjs/clients/rtkq/preferences/v1alpha1/index.cjs"
"import": "./src/clients/rtkq/preferences/v1alpha1/index.ts",
"require": "./src/clients/rtkq/preferences/v1alpha1/index.ts"
},
"./rtkq/collections/v1alpha1": {
"import": "./src/clients/rtkq/collections/v1alpha1/index.ts",
"require": "./src/clients/rtkq/collections/v1alpha1/index.ts"
},
"./rtkq/provisioning/v0alpha1": {
"@grafana-app/source": "./src/clients/rtkq/provisioning/v0alpha1/index.ts",
"types": "./dist/types/clients/rtkq/provisioning/v0alpha1/index.d.ts",
"import": "./dist/esm/clients/rtkq/provisioning/v0alpha1/index.mjs",
"require": "./dist/cjs/clients/rtkq/provisioning/v0alpha1/index.cjs"
"import": "./src/clients/rtkq/provisioning/v0alpha1/index.ts",
"require": "./src/clients/rtkq/provisioning/v0alpha1/index.ts"
},
"./rtkq/shorturl/v1beta1": {
"@grafana-app/source": "./src/clients/rtkq/shorturl/v1beta1/index.ts",
"types": "./dist/types/clients/rtkq/shorturl/v1beta1/index.d.ts",
"import": "./dist/esm/clients/rtkq/shorturl/v1beta1/index.mjs",
"require": "./dist/cjs/clients/rtkq/shorturl/v1beta1/index.cjs"
"import": "./src/clients/rtkq/shorturl/v1beta1/index.ts",
"require": "./src/clients/rtkq/shorturl/v1beta1/index.ts"
},
"./rtkq/historian.alerting/v0alpha1": {
"@grafana-app/source": "./src/clients/rtkq/historian.alerting/v0alpha1/index.ts",
"types": "./dist/types/clients/rtkq/historian.alerting/v0alpha1/index.d.ts",
"import": "./dist/esm/clients/rtkq/historian.alerting/v0alpha1/index.mjs",
"require": "./dist/cjs/clients/rtkq/historian.alerting/v0alpha1/index.cjs"
"import": "./src/clients/rtkq/historian.alerting/v0alpha1/index.ts",
"require": "./src/clients/rtkq/historian.alerting/v0alpha1/index.ts"
},
"./rtkq/logsdrilldown/v1alpha1": {
"@grafana-app/source": "./src/clients/rtkq/logsdrilldown/v1alpha1/index.ts",
"types": "./dist/types/clients/rtkq/logsdrilldown/v1alpha1/index.d.ts",
"import": "./dist/esm/clients/rtkq/logsdrilldown/v1alpha1/index.mjs",
"require": "./dist/cjs/clients/rtkq/logsdrilldown/v1alpha1/index.cjs"
"import": "./src/clients/rtkq/logsdrilldown/v1alpha1/index.ts",
"require": "./src/clients/rtkq/logsdrilldown/v1alpha1/index.ts"
}
},
"publishConfig": {
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"access": "public"
},
"files": [

View File

@@ -5,17 +5,35 @@ import { cjsOutput, entryPoint, esmOutput, plugins } from '../rollup.config.part
const rq = createRequire(import.meta.url);
const pkg = rq('./package.json');
const apiClients = Object.entries<{ import: string; require: string }>(pkg.exports).filter(([key]) =>
key.startsWith('./rtkq/')
);
const apiClientConfigs = apiClients.map(([name, { import: importPath }]) => {
const baseCjsOutput = cjsOutput(pkg);
const entryFileNames = name.replace('./', '') + '.cjs';
const cjsOutputConfig = { ...baseCjsOutput, entryFileNames };
return {
input: importPath.replace('./', ''),
plugins,
output: [cjsOutputConfig, esmOutput(pkg, 'grafana-api-clients')],
treeshake: false,
};
});
export default [
{
input: entryPoint,
plugins,
output: [cjsOutput(pkg, 'grafana-api-clients'), esmOutput(pkg, 'grafana-api-clients')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-api-clients')],
treeshake: false,
},
{
input: 'src/clients/rtkq/index.ts',
plugins,
output: [cjsOutput(pkg, 'grafana-api-clients'), esmOutput(pkg, 'grafana-api-clients')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-api-clients')],
treeshake: false,
},
...apiClientConfigs,
];

View File

@@ -143,10 +143,8 @@ export const updatePackageJsonExports =
// Create the new export entry
const newExportKey = `./rtkq/${groupName}/${version}`;
const newExportValue = {
'@grafana-app/source': `./src/clients/rtkq/${groupName}/${version}/index.ts`,
types: `./dist/types/clients/rtkq/${groupName}/${version}/index.d.ts`,
import: `./dist/esm/clients/rtkq/${groupName}/${version}/index.mjs`,
require: `./dist/cjs/clients/rtkq/${groupName}/${version}/index.cjs`,
import: `./src/clients/rtkq/${groupName}/${version}/index.ts`,
require: `./src/clients/rtkq/${groupName}/${version}/index.ts`,
};
// Check if export already exists

View File

@@ -8,8 +8,7 @@
"emitDeclarationOnly": true,
"isolatedModules": true,
"rootDirs": ["."],
"allowImportingTsExtensions": true,
"moduleResolution": "bundler"
"allowImportingTsExtensions": true
},
"exclude": ["dist/**/*"],
"include": [
@@ -18,12 +17,5 @@
"../grafana-ui/src/types/*.d.ts",
"../grafana-i18n/src/types/*.d.ts",
"src/**/*.ts*"
],
"ts-node": {
"swc": true,
"compilerOptions": {
"module": "es2020",
"moduleResolution": "Bundler"
}
}
]
}

View File

@@ -13,31 +13,32 @@
"url": "http://github.com/grafana/grafana.git",
"directory": "packages/grafana-data"
},
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"main": "src/index.ts",
"types": "src/index.ts",
"module": "src/index.ts",
"exports": {
"./package.json": "./package.json",
".": {
"@grafana-app/source": "./src/index.ts",
"types": "./dist/types/index.d.ts",
"import": "./dist/esm/index.mjs",
"require": "./dist/cjs/index.cjs"
},
"./unstable": {
"@grafana-app/source": "./src/unstable.ts",
"types": "./dist/types/unstable.d.ts",
"import": "./dist/esm/unstable.mjs",
"require": "./dist/cjs/unstable.cjs"
"import": "./src/index.ts",
"require": "./src/index.ts"
},
"./internal": {
"@grafana-app/source": "./src/internal/index.ts"
"import": "./src/internal/index.ts",
"require": "./src/internal/index.ts"
},
"./unstable": {
"import": "./src/unstable.ts",
"require": "./src/unstable.ts"
},
"./test": {
"@grafana-app/source": "./test/index.ts"
"import": "./test/index.ts",
"require": "./test/index.ts"
}
},
"publishConfig": {
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"access": "public"
},
"files": [
@@ -50,8 +51,8 @@
"build": "tsc -p ./tsconfig.build.json && rollup -c rollup.config.ts --configPlugin esbuild",
"clean": "rimraf ./dist ./compiled ./unstable ./package.tgz",
"typecheck": "tsc --emitDeclarationOnly false --noEmit",
"prepack": "cp package.json package.json.bak && node ../../scripts/prepare-npm-package.js",
"postpack": "mv package.json.bak package.json"
"prepack": "cp package.json package.json.bak && ALIAS_PACKAGE_NAME=unstable node ../../scripts/prepare-npm-package.js",
"postpack": "mv package.json.bak package.json && rimraf ./unstable"
},
"dependencies": {
"@braintree/sanitize-url": "7.0.1",

View File

@@ -9,13 +9,13 @@ export default [
{
input: entryPoint,
plugins,
output: [cjsOutput(pkg, 'grafana-data'), esmOutput(pkg, 'grafana-data')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-data')],
treeshake: false,
},
{
input: 'src/unstable.ts',
plugins,
output: [cjsOutput(pkg, 'grafana-data'), esmOutput(pkg, 'grafana-data')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-data')],
treeshake: false,
},
];

View File

@@ -3,12 +3,10 @@
"compilerOptions": {
"declaration": true,
"jsx": "react-jsx",
"baseUrl": "./",
"declarationDir": "./dist/types",
"emitDeclarationOnly": true,
"isolatedModules": true,
"rootDirs": ["."],
"moduleResolution": "bundler"
"rootDirs": ["."]
},
"exclude": ["dist/**/*"],
"include": [

View File

@@ -16,19 +16,12 @@
"url": "http://github.com/grafana/grafana.git",
"directory": "packages/grafana-e2e-selectors"
},
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"exports": {
"./package.json": "./package.json",
".": {
"@grafana-app/source": "./src/index.ts",
"types": "./dist/types/index.d.ts",
"import": "./dist/esm/index.mjs",
"require": "./dist/cjs/index.cjs"
}
},
"main": "src/index.ts",
"types": "src/index.ts",
"publishConfig": {
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"access": "public"
},
"files": [

View File

@@ -9,7 +9,7 @@ export default [
{
input: entryPoint,
plugins,
output: [cjsOutput(pkg, 'grafana-e2e-selectors'), esmOutput(pkg, 'grafana-e2e-selectors')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-e2e-selectors')],
treeshake: false,
},
];

View File

@@ -1332,7 +1332,6 @@ export const versionedComponents = {
},
DebugOverlay: {
wrapper: {
'12.3.0': 'data-testid debug-overlay-wrapper',
'9.2.0': 'debug-overlay',
},
},

View File

@@ -5,8 +5,7 @@
"declarationDir": "./dist/types",
"emitDeclarationOnly": true,
"isolatedModules": true,
"rootDirs": ["."],
"moduleResolution": "bundler"
"rootDirs": ["."]
},
"exclude": ["dist/**/*"],
"include": ["src/**/*.ts"]

View File

@@ -16,19 +16,12 @@
"url": "http://github.com/grafana/grafana.git",
"directory": "packages/grafana-flamegraph"
},
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"exports": {
"./package.json": "./package.json",
".": {
"@grafana-app/source": "./src/index.ts",
"types": "./dist/types/index.d.ts",
"import": "./dist/esm/index.mjs",
"require": "./dist/cjs/index.cjs"
}
},
"main": "src/index.ts",
"types": "src/index.ts",
"publishConfig": {
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"access": "public"
},
"files": [

View File

@@ -9,7 +9,7 @@ export default [
{
input: entryPoint,
plugins,
output: [cjsOutput(pkg, 'grafana-flamegraph'), esmOutput(pkg, 'grafana-flamegraph')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-flamegraph')],
treeshake: false,
},
];

View File

@@ -7,8 +7,7 @@
"declarationDir": "./dist/types",
"emitDeclarationOnly": true,
"isolatedModules": true,
"rootDirs": ["."],
"moduleResolution": "bundler"
"rootDirs": ["."]
},
"exclude": ["dist/**/*"],
"include": ["src/**/*.ts*", "../../public/app/types/*.d.ts", "../grafana-ui/src/types/*.d.ts"]

View File

@@ -14,32 +14,33 @@
"url": "http://github.com/grafana/grafana.git",
"directory": "packages/grafana-i18n"
},
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"main": "src/index.ts",
"types": "src/index.ts",
"module": "src/index.ts",
"exports": {
"./package.json": "./package.json",
".": {
"@grafana-app/source": "./src/index.ts",
"types": "./dist/types/index.d.ts",
"import": "./dist/esm/index.mjs",
"require": "./dist/cjs/index.cjs"
"import": "./src/index.ts",
"require": "./src/index.ts"
},
"./internal": {
"@grafana-app/source": "./src/internal/index.ts"
"import": "./src/internal/index.ts",
"require": "./src/internal/index.ts"
},
"./eslint-plugin": {
"@grafana-app/source": "./src/eslint/index.cjs",
"types": "./src/eslint/index.d.ts",
"default": "./src/eslint/index.cjs"
"import": "./src/eslint/index.cjs",
"require": "./src/eslint/index.cjs"
}
},
"publishConfig": {
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"access": "public"
},
"files": [
"dist",
"src/eslint/**/*",
"./README.md",
"./CHANGELOG.md",
"LICENSE_APACHE2"

View File

@@ -1,4 +1,5 @@
import { createRequire } from 'node:module';
import copy from 'rollup-plugin-copy';
import { entryPoint, plugins, esmOutput, cjsOutput } from '../rollup.config.parts';
@@ -8,8 +9,13 @@ const pkg = rq('./package.json');
export default [
{
input: entryPoint,
plugins,
output: [cjsOutput(pkg, 'grafana-i18n'), esmOutput(pkg, 'grafana-i18n')],
plugins: [
...plugins,
copy({
targets: [{ src: 'src/eslint', dest: 'dist' }],
}),
],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-i18n')],
treeshake: false,
},
];

View File

@@ -6,8 +6,7 @@
"declarationDir": "./dist/types",
"emitDeclarationOnly": true,
"isolatedModules": true,
"rootDirs": ["."],
"moduleResolution": "bundler"
"rootDirs": ["."]
},
"exclude": ["dist/**/*"],
"include": ["src/**/*.ts*"]

View File

@@ -8,8 +8,7 @@
"emitDeclarationOnly": true,
"isolatedModules": true,
"allowJs": true,
"rootDirs": ["."],
"moduleResolution": "bundler"
"rootDirs": ["."]
},
"exclude": ["dist/**/*"],
"include": [

View File

@@ -17,9 +17,6 @@ export default {
setupFiles: ['jest-canvas-mock'],
setupFilesAfterEnv: ['<rootDir>/jest-setup.js'],
testEnvironment: 'jsdom',
testEnvironmentOptions: {
customExportConditions: ['@grafana-app/source', 'browser'],
},
testMatch: ['<rootDir>/**/__tests__/**/*.{js,jsx,ts,tsx}', '<rootDir>/**/*.{spec,test,jest}.{js,jsx,ts,tsx}'],
transform: {
'^.+\\.(t|j)sx?$': [

View File

@@ -1,13 +1,12 @@
{
"compilerOptions": {
"allowImportingTsExtensions": true,
"alwaysStrict": true,
"customConditions": ["@grafana-app/source"],
"declaration": false,
"jsx": "react-jsx",
"alwaysStrict": true,
"declaration": false,
"resolveJsonModule": true,
"moduleResolution": "bundler",
"noEmit": true,
"resolveJsonModule": true
"allowImportingTsExtensions": true
},
"extends": "@grafana/tsconfig",
"exclude": ["**/*.test.ts", "**/*.test.tsx", "**/*.spec.ts", "**/*.spec.tsx"],

View File

@@ -312,7 +312,6 @@ const config = async (env: Env): Promise<Configuration> => {
resolve: {
extensions: ['.ts', '.tsx', '.js', '.jsx'],
conditionNames: ['@grafana-app/source', '...'],
unsafeCache: true,
},

View File

@@ -15,18 +15,8 @@
"url": "http://github.com/grafana/grafana.git",
"directory": "packages/grafana-prometheus"
},
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"exports": {
"./package.json": "./package.json",
".": {
"@grafana-app/source": "./src/index.ts",
"types": "./dist/types/index.d.ts",
"import": "./dist/esm/index.mjs",
"require": "./dist/cjs/index.cjs"
}
},
"main": "src/index.ts",
"types": "src/index.ts",
"files": [
"./dist",
"./README.md",
@@ -34,6 +24,9 @@
"./LICENSE_AGPL"
],
"publishConfig": {
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"access": "public"
},
"scripts": {

View File

@@ -12,7 +12,7 @@ export default [
{
input: entryPoint,
plugins: [...plugins, image(), json(), dynamicImportVars()],
output: [cjsOutput(pkg, 'grafana-prometheus'), esmOutput(pkg, 'grafana-prometheus')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-prometheus')],
treeshake: false,
},
];

View File

@@ -14,28 +14,28 @@
"url": "http://github.com/grafana/grafana.git",
"directory": "packages/grafana-runtime"
},
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"main": "src/index.ts",
"types": "src/index.ts",
"module": "src/index.ts",
"exports": {
"./package.json": "./package.json",
".": {
"@grafana-app/source": "./src/index.ts",
"types": "./dist/types/index.d.ts",
"import": "./dist/esm/index.mjs",
"require": "./dist/cjs/index.cjs"
},
"./unstable": {
"@grafana-app/source": "./src/unstable.ts",
"types": "./dist/types/unstable.d.ts",
"import": "./dist/esm/unstable.mjs",
"require": "./dist/cjs/unstable.cjs"
"import": "./src/index.ts",
"require": "./src/index.ts"
},
"./internal": {
"@grafana-app/source": "./src/internal/index.ts"
"import": "./src/internal/index.ts",
"require": "./src/internal/index.ts"
},
"./unstable": {
"import": "./src/unstable.ts",
"require": "./src/unstable.ts"
}
},
"publishConfig": {
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"access": "public"
},
"files": [
@@ -49,8 +49,8 @@
"bundle": "rollup -c rollup.config.ts --configPlugin esbuild",
"clean": "rimraf ./dist ./compiled ./unstable ./package.tgz",
"typecheck": "tsc --emitDeclarationOnly false --noEmit",
"prepack": "cp package.json package.json.bak && node ../../scripts/prepare-npm-package.js",
"postpack": "mv package.json.bak package.json"
"prepack": "cp package.json package.json.bak && ALIAS_PACKAGE_NAME=unstable node ../../scripts/prepare-npm-package.js",
"postpack": "mv package.json.bak package.json && rimraf ./unstable"
},
"dependencies": {
"@grafana/data": "12.4.0-pre",

View File

@@ -9,13 +9,13 @@ export default [
{
input: entryPoint,
plugins,
output: [cjsOutput(pkg, 'grafana-runtime'), esmOutput(pkg, 'grafana-runtime')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-runtime')],
treeshake: false,
},
{
input: 'src/unstable.ts',
plugins,
output: [cjsOutput(pkg, 'grafana-runtime'), esmOutput(pkg, 'grafana-runtime')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-runtime')],
treeshake: false,
},
];

View File

@@ -9,15 +9,7 @@ export * from './analytics/types';
export { loadPluginCss, type PluginCssOptions, setPluginImportUtils, getPluginImportUtils } from './utils/plugin';
export { reportMetaAnalytics, reportInteraction, reportPageview, reportExperimentView } from './analytics/utils';
export { featureEnabled } from './utils/licensing';
export {
logInfo,
logDebug,
logWarning,
logError,
createMonitoringLogger,
logMeasurement,
type MonitoringLogger,
} from './utils/logging';
export { logInfo, logDebug, logWarning, logError, createMonitoringLogger, logMeasurement } from './utils/logging';
export {
DataSourceWithBackend,
HealthCheckError,

View File

@@ -8,8 +8,7 @@
"emitDeclarationOnly": true,
"isolatedModules": true,
"allowJs": true,
"rootDirs": ["."],
"moduleResolution": "bundler"
"rootDirs": ["."]
},
"exclude": ["dist/**/*"],
"include": [

View File

@@ -13,14 +13,13 @@
"url": "http://github.com/grafana/grafana.git",
"directory": "packages/grafana-schema"
},
"main": "./src/index.ts",
"module": "./src/index.ts",
"types": "./src/index.ts",
"main": "src/index.ts",
"types": "src/index.ts",
"publishConfig": {
"access": "public",
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts"
"types": "./dist/types/index.d.ts",
"access": "public"
},
"files": [
"dist",

View File

@@ -15,12 +15,7 @@ export default [
{
input: entryPoint,
plugins,
output: [
// Schema still uses publishConfig to define output directory.
// TODO: Migrate this package to use exports.
cjsOutput(pkg, 'grafana-schema', { dir: path.dirname(pkg.publishConfig.main) }),
esmOutput(pkg, 'grafana-schema', { dir: path.dirname(pkg.publishConfig.module) }),
],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-schema')],
treeshake: false,
},
{

View File

@@ -3,12 +3,10 @@
"compilerOptions": {
"declaration": true,
"jsx": "react-jsx",
"baseUrl": "./",
"declarationDir": "./dist/types",
"emitDeclarationOnly": true,
"isolatedModules": true,
"rootDirs": ["."],
"moduleResolution": "bundler"
"rootDirs": ["."]
},
"exclude": ["dist/**/*"],
"include": ["src/**/*.ts*"]

View File

@@ -1,3 +1,3 @@
import { createMonitoringLogger, MonitoringLogger } from '@grafana/runtime';
import { createMonitoringLogger } from '@grafana/runtime';
export const sqlPluginLogger: MonitoringLogger = createMonitoringLogger('features.plugins.sql');
export const sqlPluginLogger = createMonitoringLogger('features.plugins.sql');

View File

@@ -8,8 +8,7 @@
"emitDeclarationOnly": true,
"isolatedModules": true,
"strict": true,
"rootDirs": ["."],
"moduleResolution": "bundler"
"rootDirs": ["."]
},
"exclude": ["dist/**/*"],
"include": ["src/**/*.ts*", "../../public/app/types/*.d.ts", "../grafana-ui/src/types/*.d.ts"]

View File

@@ -95,16 +95,6 @@ const mainConfig: StorybookConfig = {
},
});
// Tell storybook to resolve imports with the @grafana-app/source condition for
// the packages in this repo.
if (config && config.resolve) {
if (Array.isArray(config.resolve.conditionNames)) {
config.resolve.conditionNames.unshift('@grafana-app/source');
} else {
config.resolve.conditionNames = ['@grafana-app/source', '...'];
}
}
return config;
},
};

View File

@@ -1,7 +1,8 @@
{
"compilerOptions": {
"declarationDir": "dist",
"noUnusedLocals": false
"noUnusedLocals": false,
"outDir": "compiled"
},
"extends": "../tsconfig.json",
"include": ["../src/**/*.ts*", "../../../public/app/types/svg.d.ts"]

View File

@@ -16,28 +16,28 @@
"url": "http://github.com/grafana/grafana.git",
"directory": "packages/grafana-ui"
},
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"main": "src/index.ts",
"types": "src/index.ts",
"module": "src/index.ts",
"exports": {
"./package.json": "./package.json",
".": {
"@grafana-app/source": "./src/index.ts",
"types": "./dist/types/index.d.ts",
"import": "./dist/esm/index.mjs",
"require": "./dist/cjs/index.cjs"
},
"./unstable": {
"@grafana-app/source": "./src/unstable.ts",
"types": "./dist/types/unstable.d.ts",
"import": "./dist/esm/unstable.mjs",
"require": "./dist/cjs/unstable.cjs"
"import": "./src/index.ts",
"require": "./src/index.ts"
},
"./internal": {
"@grafana-app/source": "./src/internal/index.ts"
"import": "./src/internal/index.ts",
"require": "./src/internal/index.ts"
},
"./unstable": {
"import": "./src/unstable.ts",
"require": "./src/unstable.ts"
}
},
"publishConfig": {
"main": "./dist/cjs/index.cjs",
"module": "./dist/esm/index.mjs",
"types": "./dist/types/index.d.ts",
"access": "public"
},
"files": [
@@ -55,8 +55,8 @@
"storybook:build": "storybook build -o ./dist/storybook -c .storybook",
"storybook:test": "test-storybook --url http://localhost:9001",
"typecheck": "tsc --emitDeclarationOnly false --noEmit",
"prepack": "cp package.json package.json.bak && node ../../scripts/prepare-npm-package.js",
"postpack": "mv package.json.bak package.json"
"prepack": "cp package.json package.json.bak && ALIAS_PACKAGE_NAME=unstable node ../../scripts/prepare-npm-package.js",
"postpack": "mv package.json.bak package.json && rimraf ./unstable"
},
"browserslist": [
"defaults",

View File

@@ -24,7 +24,7 @@ export default [
flatten: false,
}),
],
output: [cjsOutput(pkg, 'grafana-ui'), esmOutput(pkg, 'grafana-ui')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-ui')],
treeshake: false,
},
{
@@ -37,7 +37,7 @@ export default [
flatten: false,
}),
],
output: [cjsOutput(pkg, 'grafana-ui'), esmOutput(pkg, 'grafana-ui')],
output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-ui')],
treeshake: false,
},
];

View File

@@ -94,5 +94,4 @@ class UnthemedSelectOptionGroup extends PureComponent<ExtendedGroupProps, State>
}
}
// TODO: type this properly
export const SelectOptionGroup: React.FC<ExtendedGroupProps> = withTheme2(UnthemedSelectOptionGroup);
export const SelectOptionGroup = withTheme2(UnthemedSelectOptionGroup);

View File

@@ -79,7 +79,7 @@ export const getModalStyles = (theme: GrafanaTheme2) => {
modalContent: css({
overflow: 'auto',
padding: theme.spacing(3, 3, 0, 3),
marginBottom: theme.spacing(2.5),
marginBottom: theme.spacing(3),
scrollbarWidth: 'thin',
width: '100%',

View File

@@ -76,5 +76,4 @@ class UnthemedValueContainer<Option, isMulti extends boolean, Group extends Grou
}
}
export const ValueContainer: React.FC<ValueContainerProps<unknown, boolean, GroupBase<unknown>>> =
withTheme2(UnthemedValueContainer);
export const ValueContainer = withTheme2(UnthemedValueContainer);

View File

@@ -8,8 +8,7 @@
"emitDeclarationOnly": true,
"isolatedModules": true,
"allowJs": true,
"rootDirs": ["."],
"moduleResolution": "bundler"
"rootDirs": ["."]
},
"exclude": ["dist/**/*"],
"include": ["../../public/test/setupTests.ts", "../../public/app/types/*.d.ts", "src/**/*.ts*"],

View File

@@ -23,29 +23,25 @@ export const plugins = [
];
// Generates a rollup configuration for commonjs output.
export function cjsOutput(pkg, pkgName, overrides = {}) {
export function cjsOutput(pkg) {
return {
format: 'cjs',
sourcemap: true,
dir: dirname(pkg.main),
dir: dirname(pkg.publishConfig.main),
entryFileNames: '[name].cjs',
preserveModules: true,
preserveModulesRoot: resolve(projectCwd, `packages/${pkgName}/src`),
esModule: true,
interop: 'compat',
...overrides,
};
}
// Generate a rollup configuration for es module output.
export function esmOutput(pkg, pkgName, overrides = {}) {
export function esmOutput(pkg, pkgName) {
return {
format: 'esm',
sourcemap: true,
dir: dirname(pkg.module),
dir: dirname(pkg.publishConfig.module),
entryFileNames: '[name].mjs',
preserveModules: true,
preserveModulesRoot: resolve(projectCwd, `packages/${pkgName}/src`),
...overrides,
};
}

View File

@@ -113,7 +113,6 @@ func ProvideMigratorDashboardAccessor(
dashboardPermissionSvc: nil, // not needed for migration
libraryPanelSvc: nil, // not needed for migration
accessControl: accessControl,
log: log.New("legacy.dashboard.migrator.accessor"),
}
}
@@ -137,7 +136,6 @@ func NewDashboardSQLAccess(sql legacysql.LegacyDatabaseProvider,
dashboardPermissionSvc: dashboardPermissionSvc,
libraryPanelSvc: libraryPanelSvc,
accessControl: accessControl,
log: log.New("legacy.dashboard.accessor"),
}
}

View File

@@ -71,6 +71,98 @@ func (_c *MockJobProgressRecorder_Complete_Call) RunAndReturn(run func(context.C
return _c
}
// HasDirPathFailedDeletion provides a mock function with given fields: folderPath
func (_m *MockJobProgressRecorder) HasDirPathFailedDeletion(folderPath string) bool {
ret := _m.Called(folderPath)
if len(ret) == 0 {
panic("no return value specified for HasDirPathFailedDeletion")
}
var r0 bool
if rf, ok := ret.Get(0).(func(string) bool); ok {
r0 = rf(folderPath)
} else {
r0 = ret.Get(0).(bool)
}
return r0
}
// MockJobProgressRecorder_HasDirPathFailedDeletion_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HasDirPathFailedDeletion'
type MockJobProgressRecorder_HasDirPathFailedDeletion_Call struct {
*mock.Call
}
// HasDirPathFailedDeletion is a helper method to define mock.On call
// - folderPath string
func (_e *MockJobProgressRecorder_Expecter) HasDirPathFailedDeletion(folderPath interface{}) *MockJobProgressRecorder_HasDirPathFailedDeletion_Call {
return &MockJobProgressRecorder_HasDirPathFailedDeletion_Call{Call: _e.mock.On("HasDirPathFailedDeletion", folderPath)}
}
func (_c *MockJobProgressRecorder_HasDirPathFailedDeletion_Call) Run(run func(folderPath string)) *MockJobProgressRecorder_HasDirPathFailedDeletion_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(string))
})
return _c
}
func (_c *MockJobProgressRecorder_HasDirPathFailedDeletion_Call) Return(_a0 bool) *MockJobProgressRecorder_HasDirPathFailedDeletion_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockJobProgressRecorder_HasDirPathFailedDeletion_Call) RunAndReturn(run func(string) bool) *MockJobProgressRecorder_HasDirPathFailedDeletion_Call {
_c.Call.Return(run)
return _c
}
// HasDirPathFailedCreation provides a mock function with given fields: path
func (_m *MockJobProgressRecorder) HasDirPathFailedCreation(path string) bool {
ret := _m.Called(path)
if len(ret) == 0 {
panic("no return value specified for HasDirPathFailedCreation")
}
var r0 bool
if rf, ok := ret.Get(0).(func(string) bool); ok {
r0 = rf(path)
} else {
r0 = ret.Get(0).(bool)
}
return r0
}
// MockJobProgressRecorder_HasDirPathFailedCreation_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HasDirPathFailedCreation'
type MockJobProgressRecorder_HasDirPathFailedCreation_Call struct {
*mock.Call
}
// HasDirPathFailedCreation is a helper method to define mock.On call
// - path string
func (_e *MockJobProgressRecorder_Expecter) HasDirPathFailedCreation(path interface{}) *MockJobProgressRecorder_HasDirPathFailedCreation_Call {
return &MockJobProgressRecorder_HasDirPathFailedCreation_Call{Call: _e.mock.On("HasDirPathFailedCreation", path)}
}
func (_c *MockJobProgressRecorder_HasDirPathFailedCreation_Call) Run(run func(path string)) *MockJobProgressRecorder_HasDirPathFailedCreation_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(string))
})
return _c
}
func (_c *MockJobProgressRecorder_HasDirPathFailedCreation_Call) Return(_a0 bool) *MockJobProgressRecorder_HasDirPathFailedCreation_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockJobProgressRecorder_HasDirPathFailedCreation_Call) RunAndReturn(run func(string) bool) *MockJobProgressRecorder_HasDirPathFailedCreation_Call {
_c.Call.Return(run)
return _c
}
// Record provides a mock function with given fields: ctx, result
func (_m *MockJobProgressRecorder) Record(ctx context.Context, result JobResourceResult) {
_m.Called(ctx, result)

View File

@@ -2,6 +2,7 @@ package jobs
import (
"context"
"errors"
"fmt"
"sync"
"time"
@@ -9,6 +10,8 @@ import (
"github.com/grafana/grafana-app-sdk/logging"
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/apps/provisioning/pkg/safepath"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
)
// maybeNotifyProgress will only notify if a certain amount of time has passed
@@ -58,6 +61,8 @@ type jobProgressRecorder struct {
notifyImmediatelyFn ProgressFn
maybeNotifyFn ProgressFn
summaries map[string]*provisioning.JobResourceSummary
failedCreations []string // Tracks folder paths that failed to be created
failedDeletions []string // Tracks resource paths that failed to be deleted
}
func newJobProgressRecorder(ProgressFn ProgressFn) JobProgressRecorder {
@@ -84,10 +89,26 @@ func (r *jobProgressRecorder) Record(ctx context.Context, result JobResourceResu
if result.Error != nil {
shouldLogError = true
logErr = result.Error
if len(r.errors) < 20 {
r.errors = append(r.errors, result.Error.Error())
// Don't count ignored actions as errors in error count or error list
if result.Action != repository.FileActionIgnored {
if len(r.errors) < 20 {
r.errors = append(r.errors, result.Error.Error())
}
r.errorCount++
}
// Automatically track failed operations based on error type and action
// Check if this is a PathCreationError (folder creation failure)
var pathErr *resources.PathCreationError
if errors.As(result.Error, &pathErr) {
r.failedCreations = append(r.failedCreations, pathErr.Path)
}
// Track failed deletions; any failed deletion will prevent deleting the parent folder (as it won't be empty)
if result.Action == repository.FileActionDeleted {
r.failedDeletions = append(r.failedDeletions, result.Path)
}
r.errorCount++
}
r.updateSummary(result)
@@ -112,6 +133,8 @@ func (r *jobProgressRecorder) ResetResults() {
r.errorCount = 0
r.errors = nil
r.summaries = make(map[string]*provisioning.JobResourceSummary)
r.failedCreations = nil
r.failedDeletions = nil
}
func (r *jobProgressRecorder) SetMessage(ctx context.Context, msg string) {
@@ -309,3 +332,29 @@ func (r *jobProgressRecorder) Complete(ctx context.Context, err error) provision
return jobStatus
}
// HasDirPathFailedCreation checks if a path is nested under any failed folder creation
func (r *jobProgressRecorder) HasDirPathFailedCreation(path string) bool {
r.mu.RLock()
defer r.mu.RUnlock()
for _, failedCreation := range r.failedCreations {
if safepath.InDir(path, failedCreation) {
return true
}
}
return false
}
// HasDirPathFailedDeletion checks if any resource deletions failed under a folder path
func (r *jobProgressRecorder) HasDirPathFailedDeletion(folderPath string) bool {
r.mu.RLock()
defer r.mu.RUnlock()
for _, failedDeletion := range r.failedDeletions {
if safepath.InDir(failedDeletion, folderPath) {
return true
}
}
return false
}

View File

@@ -7,6 +7,7 @@ import (
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -252,3 +253,215 @@ func TestJobProgressRecorderWarningOnlyNoErrors(t *testing.T) {
require.NotNil(t, finalStatus.Warnings)
assert.Len(t, finalStatus.Warnings, 1)
}
func TestJobProgressRecorderFolderFailureTracking(t *testing.T) {
ctx := context.Background()
// Create a progress recorder
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
return nil
}
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
// Record a folder creation failure with PathCreationError
pathErr := &resources.PathCreationError{
Path: "folder1/",
Err: assert.AnError,
}
recorder.Record(ctx, JobResourceResult{
Path: "folder1/file.json",
Action: repository.FileActionCreated,
Error: pathErr,
})
// Record another PathCreationError for a different folder
pathErr2 := &resources.PathCreationError{
Path: "folder2/subfolder/",
Err: assert.AnError,
}
recorder.Record(ctx, JobResourceResult{
Path: "folder2/subfolder/file.json",
Action: repository.FileActionCreated,
Error: pathErr2,
})
// Record a deletion failure
recorder.Record(ctx, JobResourceResult{
Path: "folder3/file1.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
// Record another deletion failure
recorder.Record(ctx, JobResourceResult{
Path: "folder4/subfolder/file2.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
// Verify failed creations are tracked
recorder.mu.RLock()
assert.Len(t, recorder.failedCreations, 2)
assert.Contains(t, recorder.failedCreations, "folder1/")
assert.Contains(t, recorder.failedCreations, "folder2/subfolder/")
// Verify failed deletions are tracked
assert.Len(t, recorder.failedDeletions, 2)
assert.Contains(t, recorder.failedDeletions, "folder3/file1.json")
assert.Contains(t, recorder.failedDeletions, "folder4/subfolder/file2.json")
recorder.mu.RUnlock()
}
func TestJobProgressRecorderHasDirPathFailedCreation(t *testing.T) {
ctx := context.Background()
// Create a progress recorder
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
return nil
}
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
// Add failed creations via Record
pathErr1 := &resources.PathCreationError{
Path: "folder1/",
Err: assert.AnError,
}
recorder.Record(ctx, JobResourceResult{
Path: "folder1/file.json",
Action: repository.FileActionCreated,
Error: pathErr1,
})
pathErr2 := &resources.PathCreationError{
Path: "folder2/subfolder/",
Err: assert.AnError,
}
recorder.Record(ctx, JobResourceResult{
Path: "folder2/subfolder/file.json",
Action: repository.FileActionCreated,
Error: pathErr2,
})
// Test nested paths
assert.True(t, recorder.HasDirPathFailedCreation("folder1/file.json"))
assert.True(t, recorder.HasDirPathFailedCreation("folder1/nested/file.json"))
assert.True(t, recorder.HasDirPathFailedCreation("folder2/subfolder/file.json"))
// Test non-nested paths
assert.False(t, recorder.HasDirPathFailedCreation("other/file.json"))
assert.False(t, recorder.HasDirPathFailedCreation("folder3/file.json"))
assert.False(t, recorder.HasDirPathFailedCreation("file.json"))
}
func TestJobProgressRecorderHasDirPathFailedDeletion(t *testing.T) {
ctx := context.Background()
// Create a progress recorder
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
return nil
}
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
// Add failed deletions via Record
recorder.Record(ctx, JobResourceResult{
Path: "folder1/file1.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
recorder.Record(ctx, JobResourceResult{
Path: "folder2/subfolder/file2.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
recorder.Record(ctx, JobResourceResult{
Path: "folder3/nested/deep/file3.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
// Test folder paths with failed deletions
assert.True(t, recorder.HasDirPathFailedDeletion("folder1/"))
assert.True(t, recorder.HasDirPathFailedDeletion("folder2/"))
assert.True(t, recorder.HasDirPathFailedDeletion("folder2/subfolder/"))
assert.True(t, recorder.HasDirPathFailedDeletion("folder3/"))
assert.True(t, recorder.HasDirPathFailedDeletion("folder3/nested/"))
assert.True(t, recorder.HasDirPathFailedDeletion("folder3/nested/deep/"))
// Test folder paths without failed deletions
assert.False(t, recorder.HasDirPathFailedDeletion("other/"))
assert.False(t, recorder.HasDirPathFailedDeletion("different/"))
}
func TestJobProgressRecorderResetResults(t *testing.T) {
ctx := context.Background()
// Create a progress recorder
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
return nil
}
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
// Add some data via Record
pathErr := &resources.PathCreationError{
Path: "folder1/",
Err: assert.AnError,
}
recorder.Record(ctx, JobResourceResult{
Path: "folder1/file.json",
Action: repository.FileActionCreated,
Error: pathErr,
})
recorder.Record(ctx, JobResourceResult{
Path: "folder2/file.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
// Verify data is stored
recorder.mu.RLock()
assert.Len(t, recorder.failedCreations, 1)
assert.Len(t, recorder.failedDeletions, 1)
recorder.mu.RUnlock()
// Reset results
recorder.ResetResults()
// Verify data is cleared
recorder.mu.RLock()
assert.Nil(t, recorder.failedCreations)
assert.Nil(t, recorder.failedDeletions)
recorder.mu.RUnlock()
}
func TestJobProgressRecorderIgnoredActionsDontCountAsErrors(t *testing.T) {
ctx := context.Background()
// Create a progress recorder
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
return nil
}
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
// Record an ignored action with error
recorder.Record(ctx, JobResourceResult{
Path: "folder1/file1.json",
Action: repository.FileActionIgnored,
Error: assert.AnError,
})
// Record a real error for comparison
recorder.Record(ctx, JobResourceResult{
Path: "folder2/file2.json",
Action: repository.FileActionCreated,
Error: assert.AnError,
})
// Verify error count doesn't include ignored actions
recorder.mu.RLock()
assert.Equal(t, 1, recorder.errorCount, "ignored actions should not be counted as errors")
assert.Len(t, recorder.errors, 1, "ignored action errors should not be in error list")
recorder.mu.RUnlock()
}

View File

@@ -29,6 +29,10 @@ type JobProgressRecorder interface {
StrictMaxErrors(maxErrors int)
SetRefURLs(ctx context.Context, refURLs *provisioning.RepositoryURLs)
Complete(ctx context.Context, err error) provisioning.JobStatus
// HasDirPathFailedCreation reports whether the given path is nested under a folder whose creation failed
HasDirPathFailedCreation(path string) bool
// HasDirPathFailedDeletion reports whether any resource under folderPath failed to be deleted
HasDirPathFailedDeletion(folderPath string) bool
}
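// A rough sketch of how these checks could be answered (an assumption about the
// recorder internals, not the actual implementation): assuming the recorder keeps
// the folder paths extracted from PathCreationError values in a set, a prefix
// match is enough, e.g.
//
//	func (r *jobProgressRecorder) HasDirPathFailedCreation(path string) bool {
//	    r.mu.RLock()
//	    defer r.mu.RUnlock()
//	    for failedDir := range r.failedCreations {
//	        if strings.HasPrefix(path, failedDir) {
//	            return true
//	        }
//	    }
//	    return false
//	}
//
// HasDirPathFailedDeletion would do the inverse check: whether any recorded failed
// deletion path starts with the given folder path.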
// Worker is a worker that can process a job

View File

@@ -80,6 +80,34 @@ func applyChange(ctx context.Context, change ResourceFileChange, clients resourc
return
}
// Check if we need to skip this action because of a previous failure on a parent/child folder
if change.Action != repository.FileActionDeleted && progress.HasDirPathFailedCreation(change.Path) {
// Skip this resource since its parent folder failed to be created
skipCtx, skipSpan := tracer.Start(ctx, "provisioning.sync.full.apply_changes.skip_nested_resource")
progress.Record(skipCtx, jobs.JobResourceResult{
Path: change.Path,
Action: repository.FileActionIgnored,
Warning: fmt.Errorf("skipped: parent folder creation failed"),
})
skipSpan.End()
return
}
if change.Action == repository.FileActionDeleted && safepath.IsDir(change.Path) {
if progress.HasDirPathFailedDeletion(change.Path) {
skipCtx, skipSpan := tracer.Start(ctx, "provisioning.sync.full.apply_changes.skip_folder_with_failed_deletions")
progress.Record(skipCtx, jobs.JobResourceResult{
Path: change.Path,
Action: repository.FileActionIgnored,
Group: resources.FolderKind.Group,
Kind: resources.FolderKind.Kind,
Warning: fmt.Errorf("skipped: child resource deletions failed"),
})
skipSpan.End()
return
}
}
if change.Action == repository.FileActionDeleted {
deleteCtx, deleteSpan := tracer.Start(ctx, "provisioning.sync.full.apply_changes.delete")
result := jobs.JobResourceResult{
@@ -138,6 +166,7 @@ func applyChange(ctx context.Context, change ResourceFileChange, clients resourc
ensureFolderSpan.RecordError(err)
ensureFolderSpan.End()
progress.Record(ctx, result)
return
}

View File

@@ -0,0 +1,432 @@
package sync
import (
"context"
"fmt"
"testing"
"github.com/prometheus/client_golang/prometheus"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
dynamicfake "k8s.io/client-go/dynamic/fake"
k8testing "k8s.io/client-go/testing"
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/jobs"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
)
/*
TestFullSync_HierarchicalErrorHandling tests the hierarchical error handling behavior:
FOLDER CREATION FAILURES:
- When a folder fails to be created with PathCreationError, all nested resources are skipped
- Nested resources are recorded with FileActionIgnored and error "skipped: parent folder creation failed"
- Only the folder creation error counts toward error limits
- Nested resource skips do NOT count toward error limits
FOLDER DELETION FAILURES:
- When a file deletion fails, it's tracked in failedDeletions
- When cleaning up folders, we check HasDirPathFailedDeletion()
- If children failed to delete, folder deletion is skipped with FileActionIgnored
- This prevents orphaning resources that still exist
DELETIONS NOT AFFECTED BY CREATION FAILURES:
- If a folder creation fails, deletion operations for resources in that folder still proceed
- This is because the resource might already exist from a previous sync
- Only creations/updates/renames are affected by failed folder creation
AUTOMATIC TRACKING:
- Record() automatically detects PathCreationError and adds to failedCreations
- Record() automatically detects deletion failures and adds to failedDeletions
- No manual calls to AddFailedCreation/AddFailedDeletion needed
*/
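// As a rough illustration of the automatic tracking described above (an assumption
// about the recorder internals, not a quote of them), Record could derive both sets
// from the result it is handed:
//
//	var pathErr *resources.PathCreationError
//	if errors.As(result.Error, &pathErr) {
//	    failedCreations[pathErr.Path] = struct{}{} // e.g. "folder1/"
//	}
//	if result.Action == repository.FileActionDeleted && result.Error != nil {
//	    failedDeletions[result.Path] = struct{}{} // e.g. "folder1/file1.json"
//	}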
func TestFullSync_HierarchicalErrorHandling(t *testing.T) { //nolint:gocyclo
tests := []struct {
name string
setupMocks func(*repository.MockRepository, *resources.MockRepositoryResources, *resources.MockResourceClients, *jobs.MockJobProgressRecorder, *dynamicfake.FakeDynamicClient)
changes []ResourceFileChange
description string
expectError bool
errorContains string
}{
{
name:        "folder creation fails, failure recorded for nested file",
description: "When creating folder1/ fails while writing folder1/file.json, the failure is recorded with a PathCreationError so later nested resources can be skipped",
changes: []ResourceFileChange{
{Path: "folder1/file.json", Action: repository.FileActionCreated},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, _ *dynamicfake.FakeDynamicClient) {
// First, check if nested under failed creation - not yet
progress.On("HasDirPathFailedCreation", "folder1/file.json").Return(false).Once()
// WriteResourceFromFile fails with PathCreationError for folder1/
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/file.json", "").
Return("", schema.GroupVersionKind{}, folderErr).Once()
// File will be recorded with error, triggering automatic tracking of folder1/ failure
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file.json" && r.Error != nil && r.Action == repository.FileActionCreated
})).Return().Once()
},
},
{
name: "folder creation fails, multiple nested resources skipped",
description: "When folder1/ fails to create, all nested resources (subfolder, files) are skipped",
changes: []ResourceFileChange{
{Path: "folder1/file1.json", Action: repository.FileActionCreated},
{Path: "folder1/subfolder/file2.json", Action: repository.FileActionCreated},
{Path: "folder1/file3.json", Action: repository.FileActionCreated},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, _ *dynamicfake.FakeDynamicClient) {
// First file triggers folder creation failure
progress.On("HasDirPathFailedCreation", "folder1/file1.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/file1.json", "").
Return("", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" && r.Error != nil
})).Return().Once()
// Subsequent files in same folder are skipped
progress.On("HasDirPathFailedCreation", "folder1/subfolder/file2.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/subfolder/file2.json" &&
r.Action == repository.FileActionIgnored &&
r.Warning != nil &&
r.Warning.Error() == "skipped: parent folder creation failed"
})).Return().Once()
progress.On("HasDirPathFailedCreation", "folder1/file3.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file3.json" &&
r.Action == repository.FileActionIgnored &&
r.Warning != nil &&
r.Warning.Error() == "skipped: parent folder creation failed"
})).Return().Once()
},
},
{
name: "file deletion failure tracked",
description: "When a file deletion fails, it's automatically tracked in failedDeletions",
changes: []ResourceFileChange{
{
Path: "folder1/file.json",
Action: repository.FileActionDeleted,
Existing: &provisioning.ResourceListItem{
Name: "file1",
Group: "dashboard.grafana.app",
Resource: "dashboards",
},
},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, dynamicClient *dynamicfake.FakeDynamicClient) {
gvk := schema.GroupVersionKind{Group: "dashboard.grafana.app", Kind: "Dashboard", Version: "v1"}
gvr := schema.GroupVersionResource{Group: "dashboard.grafana.app", Resource: "dashboards", Version: "v1"}
clients.On("ForResource", mock.Anything, mock.MatchedBy(func(gvr schema.GroupVersionResource) bool {
return gvr.Group == "dashboard.grafana.app"
})).Return(dynamicClient.Resource(gvr), gvk, nil)
// File deletion fails
dynamicClient.PrependReactor("delete", "dashboards", func(action k8testing.Action) (bool, runtime.Object, error) {
return true, nil, fmt.Errorf("permission denied")
})
// File deletion recorded with error, automatically tracked in failedDeletions
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file.json" &&
r.Action == repository.FileActionDeleted &&
r.Error != nil
})).Return().Once()
},
},
{
name: "deletion proceeds despite creation failure",
description: "When folder1/ fails to create, deletion of folder1/file2.json still proceeds (resource might exist from previous sync)",
changes: []ResourceFileChange{
{Path: "folder1/file1.json", Action: repository.FileActionCreated},
{
Path: "folder1/file2.json",
Action: repository.FileActionDeleted,
Existing: &provisioning.ResourceListItem{
Name: "file2",
Group: "dashboard.grafana.app",
Resource: "dashboards",
},
},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, dynamicClient *dynamicfake.FakeDynamicClient) {
// Creation fails
progress.On("HasDirPathFailedCreation", "folder1/file1.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/file1.json", "").
Return("", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" && r.Error != nil
})).Return().Once()
// Deletion proceeds (NOT checking HasDirPathFailedCreation for deletions)
// Note: deletion will fail because resource doesn't exist, but that's fine for this test
gvk := schema.GroupVersionKind{Group: "dashboard.grafana.app", Kind: "Dashboard", Version: "v1"}
gvr := schema.GroupVersionResource{Group: "dashboard.grafana.app", Resource: "dashboards", Version: "v1"}
clients.On("ForResource", mock.Anything, mock.MatchedBy(func(gvr schema.GroupVersionResource) bool {
return gvr.Group == "dashboard.grafana.app"
})).Return(dynamicClient.Resource(gvr), gvk, nil)
// Record deletion attempt (will have error since resource doesn't exist, but that's ok)
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file2.json" &&
r.Action == repository.FileActionDeleted
// Not checking r.Error because resource doesn't exist in fake client
})).Return().Once()
},
},
{
name: "multi-level nesting - all skipped",
description: "When level1/ fails, level1/level2/level3/file.json is also skipped",
changes: []ResourceFileChange{
{Path: "level1/file1.json", Action: repository.FileActionCreated},
{Path: "level1/level2/file2.json", Action: repository.FileActionCreated},
{Path: "level1/level2/level3/file3.json", Action: repository.FileActionCreated},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, _ *dynamicfake.FakeDynamicClient) {
// First file triggers level1/ failure
progress.On("HasDirPathFailedCreation", "level1/file1.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "level1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "level1/file1.json", "").
Return("", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "level1/file1.json" && r.Error != nil
})).Return().Once()
// All nested files are skipped
for _, path := range []string{"level1/level2/file2.json", "level1/level2/level3/file3.json"} {
progress.On("HasDirPathFailedCreation", path).Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == path && r.Action == repository.FileActionIgnored
})).Return().Once()
}
},
},
{
name: "mixed success and failure",
description: "When success/ works and failure/ fails, only failure/* are skipped",
changes: []ResourceFileChange{
{Path: "success/file1.json", Action: repository.FileActionCreated},
{Path: "failure/file2.json", Action: repository.FileActionCreated},
{Path: "failure/nested/file3.json", Action: repository.FileActionCreated},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, _ *dynamicfake.FakeDynamicClient) {
// Success path works
progress.On("HasDirPathFailedCreation", "success/file1.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "success/file1.json", "").
Return("resource1", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "success/file1.json" && r.Error == nil
})).Return().Once()
// Failure path fails
progress.On("HasDirPathFailedCreation", "failure/file2.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "failure/", Err: fmt.Errorf("disk full")}
repoResources.On("WriteResourceFromFile", mock.Anything, "failure/file2.json", "").
Return("", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/file2.json" && r.Error != nil
})).Return().Once()
// Nested file in failure path is skipped
progress.On("HasDirPathFailedCreation", "failure/nested/file3.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/nested/file3.json" && r.Action == repository.FileActionIgnored
})).Return().Once()
},
},
{
name: "folder creation fails with explicit folder in changes",
description: "When folder1/ is explicitly in changes and fails to create, all nested resources (subfolders and files) are skipped",
changes: []ResourceFileChange{
{Path: "folder1/", Action: repository.FileActionCreated},
{Path: "folder1/subfolder/", Action: repository.FileActionCreated},
{Path: "folder1/file1.json", Action: repository.FileActionCreated},
{Path: "folder1/subfolder/file2.json", Action: repository.FileActionCreated},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, _ *dynamicfake.FakeDynamicClient) {
progress.On("HasDirPathFailedCreation", "folder1/").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("EnsureFolderPathExist", mock.Anything, "folder1/").Return("", folderErr).Once()
progress.On("HasDirPathFailedCreation", "folder1/subfolder/").Return(true).Once()
progress.On("HasDirPathFailedCreation", "folder1/file1.json").Return(true).Once()
progress.On("HasDirPathFailedCreation", "folder1/subfolder/file2.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/" && r.Error != nil
})).Return().Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/subfolder/" && r.Action == repository.FileActionIgnored
})).Return().Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" && r.Action == repository.FileActionIgnored
})).Return().Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/subfolder/file2.json" && r.Action == repository.FileActionIgnored
})).Return().Once()
},
},
{
name: "folder deletion prevented when child deletion fails",
description: "When a file deletion fails, folder deletion is skipped with FileActionIgnored to prevent orphaning resources",
changes: []ResourceFileChange{
{
Path: "folder1/file1.json",
Action: repository.FileActionDeleted,
Existing: &provisioning.ResourceListItem{Name: "file1", Group: "dashboard.grafana.app", Resource: "dashboards"},
},
{Path: "folder1/", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "folder1", Group: "folder.grafana.app", Resource: "Folder"}},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, dynamicClient *dynamicfake.FakeDynamicClient) {
gvk := schema.GroupVersionKind{Group: "dashboard.grafana.app", Kind: "Dashboard", Version: "v1"}
gvr := schema.GroupVersionResource{Group: "dashboard.grafana.app", Resource: "dashboards", Version: "v1"}
clients.On("ForResource", mock.Anything, mock.MatchedBy(func(gvr schema.GroupVersionResource) bool {
return gvr.Group == "dashboard.grafana.app"
})).Return(dynamicClient.Resource(gvr), gvk, nil)
dynamicClient.PrependReactor("delete", "dashboards", func(action k8testing.Action) (bool, runtime.Object, error) {
return true, nil, fmt.Errorf("permission denied")
})
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" && r.Error != nil
})).Return().Once()
progress.On("HasDirPathFailedDeletion", "folder1/").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/" && r.Action == repository.FileActionIgnored
})).Return().Once()
},
},
{
name: "multiple folder deletion failures",
description: "When multiple independent folders have child deletion failures, all folder deletions are skipped",
changes: []ResourceFileChange{
{Path: "folder1/file1.json", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "file1", Group: "dashboard.grafana.app", Resource: "dashboards"}},
{Path: "folder1/", Action: repository.FileActionDeleted},
{Path: "folder2/file2.json", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "file2", Group: "dashboard.grafana.app", Resource: "dashboards"}},
{Path: "folder2/", Action: repository.FileActionDeleted},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, dynamicClient *dynamicfake.FakeDynamicClient) {
gvk := schema.GroupVersionKind{Group: "dashboard.grafana.app", Kind: "Dashboard", Version: "v1"}
gvr := schema.GroupVersionResource{Group: "dashboard.grafana.app", Resource: "dashboards", Version: "v1"}
clients.On("ForResource", mock.Anything, mock.MatchedBy(func(gvr schema.GroupVersionResource) bool {
return gvr.Group == "dashboard.grafana.app"
})).Return(dynamicClient.Resource(gvr), gvk, nil)
dynamicClient.PrependReactor("delete", "dashboards", func(action k8testing.Action) (bool, runtime.Object, error) {
return true, nil, fmt.Errorf("permission denied")
})
for _, path := range []string{"folder1/file1.json", "folder2/file2.json"} {
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == path && r.Error != nil
})).Return().Once()
}
progress.On("HasDirPathFailedDeletion", "folder1/").Return(true).Once()
progress.On("HasDirPathFailedDeletion", "folder2/").Return(true).Once()
for _, path := range []string{"folder1/", "folder2/"} {
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == path && r.Action == repository.FileActionIgnored
})).Return().Once()
}
},
},
{
name: "nested subfolder deletion failure",
description: "When a file deletion fails in a nested subfolder, both the subfolder and parent folder deletions are skipped",
changes: []ResourceFileChange{
{Path: "parent/subfolder/file.json", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "file1", Group: "dashboard.grafana.app", Resource: "dashboards"}},
{Path: "parent/subfolder/", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "subfolder", Group: "folder.grafana.app", Resource: "Folder"}},
{Path: "parent/", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "parent", Group: "folder.grafana.app", Resource: "Folder"}},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, dynamicClient *dynamicfake.FakeDynamicClient) {
gvk := schema.GroupVersionKind{Group: "dashboard.grafana.app", Kind: "Dashboard", Version: "v1"}
gvr := schema.GroupVersionResource{Group: "dashboard.grafana.app", Resource: "dashboards", Version: "v1"}
clients.On("ForResource", mock.Anything, mock.MatchedBy(func(gvr schema.GroupVersionResource) bool {
return gvr.Group == "dashboard.grafana.app"
})).Return(dynamicClient.Resource(gvr), gvk, nil)
dynamicClient.PrependReactor("delete", "dashboards", func(action k8testing.Action) (bool, runtime.Object, error) {
return true, nil, fmt.Errorf("permission denied")
})
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "parent/subfolder/file.json" && r.Error != nil
})).Return().Once()
progress.On("HasDirPathFailedDeletion", "parent/subfolder/").Return(true).Once()
progress.On("HasDirPathFailedDeletion", "parent/").Return(true).Once()
for _, path := range []string{"parent/subfolder/", "parent/"} {
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == path && r.Action == repository.FileActionIgnored
})).Return().Once()
}
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
scheme := runtime.NewScheme()
dynamicClient := dynamicfake.NewSimpleDynamicClient(scheme)
repo := repository.NewMockRepository(t)
repoResources := resources.NewMockRepositoryResources(t)
clients := resources.NewMockResourceClients(t)
progress := jobs.NewMockJobProgressRecorder(t)
compareFn := NewMockCompareFn(t)
repo.On("Config").Return(&provisioning.Repository{
ObjectMeta: metav1.ObjectMeta{Name: "test-repo"},
Spec: provisioning.RepositorySpec{Title: "Test Repo"},
})
tt.setupMocks(repo, repoResources, clients, progress, dynamicClient)
compareFn.On("Execute", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(tt.changes, nil)
progress.On("SetTotal", mock.Anything, len(tt.changes)).Return()
progress.On("TooManyErrors").Return(nil).Maybe()
err := FullSync(context.Background(), repo, compareFn.Execute, clients, "ref", repoResources, progress, tracing.NewNoopTracerService(), 10, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
if tt.expectError {
require.Error(t, err)
if tt.errorContains != "" {
require.Contains(t, err.Error(), tt.errorContains)
}
} else {
require.NoError(t, err)
}
progress.AssertExpectations(t)
repoResources.AssertExpectations(t)
})
}
}

View File

@@ -213,6 +213,10 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
return nil
})
progress.On("HasDirPathFailedCreation", mock.MatchedBy(func(path string) bool {
return path == "dashboards/one.json" || path == "dashboards/two.json" || path == "dashboards/three.json"
})).Return(false).Maybe()
repoResources.On("WriteResourceFromFile", mock.Anything, mock.MatchedBy(func(path string) bool {
return path == "dashboards/one.json" || path == "dashboards/two.json" || path == "dashboards/three.json"
}), "").Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil).Maybe()
@@ -235,6 +239,7 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
@@ -259,6 +264,7 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, fmt.Errorf("write error"))
@@ -285,6 +291,7 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
@@ -309,6 +316,7 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, fmt.Errorf("write error"))
@@ -335,6 +343,7 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "one/two/three/").Return(false)
repoResources.On("EnsureFolderPathExist", mock.Anything, "one/two/three/").Return("some-folder", nil)
progress.On("Record", mock.Anything, jobs.JobResourceResult{
@@ -357,6 +366,7 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "one/two/three/").Return(false)
repoResources.On(
"EnsureFolderPathExist",
@@ -581,6 +591,7 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedDeletion", "to-be-deleted/").Return(false)
scheme := runtime.NewScheme()
require.NoError(t, metav1.AddMetaToScheme(scheme))
@@ -640,6 +651,7 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedDeletion", "to-be-deleted/").Return(false)
scheme := runtime.NewScheme()
require.NoError(t, metav1.AddMetaToScheme(scheme))
@@ -695,6 +707,7 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "dashboards/slow.json").Return(false)
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/slow.json", "").
Run(func(args mock.Arguments) {

View File

@@ -60,7 +60,7 @@ func IncrementalSync(ctx context.Context, repo repository.Versioned, previousRef
if len(affectedFolders) > 0 {
cleanupStart := time.Now()
span.AddEvent("checking if impacted folders should be deleted", trace.WithAttributes(attribute.Int("affected_folders", len(affectedFolders))))
err := cleanupOrphanedFolders(ctx, repo, affectedFolders, repositoryResources, tracer)
err := cleanupOrphanedFolders(ctx, repo, affectedFolders, repositoryResources, tracer, progress)
metrics.RecordIncrementalSyncPhase(jobs.IncrementalSyncPhaseCleanup, time.Since(cleanupStart))
if err != nil {
return tracing.Error(span, fmt.Errorf("cleanup orphaned folders: %w", err))
@@ -85,6 +85,20 @@ func applyIncrementalChanges(ctx context.Context, diff []repository.VersionedFil
return nil, tracing.Error(span, err)
}
// Check if this resource is nested under a failed folder creation
// This only applies to creation/update/rename operations, not deletions
if change.Action != repository.FileActionDeleted && progress.HasDirPathFailedCreation(change.Path) {
// Skip this resource since its parent folder failed to be created
skipCtx, skipSpan := tracer.Start(ctx, "provisioning.sync.incremental.skip_nested_resource")
progress.Record(skipCtx, jobs.JobResourceResult{
Path: change.Path,
Action: repository.FileActionIgnored,
Warning: fmt.Errorf("skipped: parent folder creation failed"),
})
skipSpan.End()
continue
}
if err := resources.IsPathSupported(change.Path); err != nil {
ensureFolderCtx, ensureFolderSpan := tracer.Start(ctx, "provisioning.sync.incremental.ensure_folder_path_exist")
// Maintain the safe segment for empty folders
@@ -98,6 +112,19 @@ func applyIncrementalChanges(ctx context.Context, diff []repository.VersionedFil
if err != nil {
ensureFolderSpan.RecordError(err)
ensureFolderSpan.End()
// If this is a PathCreationError, record it as ignored and continue processing
// This allows nested resources to be skipped via HasDirPathFailedCreation
var pathErr *resources.PathCreationError
if errors.As(err, &pathErr) {
progress.Record(ensureFolderCtx, jobs.JobResourceResult{
Path: change.Path,
Action: repository.FileActionIgnored,
Error: err,
})
continue
}
return nil, tracing.Error(span, fmt.Errorf("unable to create empty file folder: %w", err))
}
@@ -185,6 +212,7 @@ func cleanupOrphanedFolders(
affectedFolders map[string]string,
repositoryResources resources.RepositoryResources,
tracer tracing.Tracer,
progress jobs.JobProgressRecorder,
) error {
ctx, span := tracer.Start(ctx, "provisioning.sync.incremental.cleanup_orphaned_folders")
defer span.End()
@@ -198,6 +226,12 @@ func cleanupOrphanedFolders(
for path, folderName := range affectedFolders {
span.SetAttributes(attribute.String("folder", folderName))
// Check if any resources under this folder failed to delete
if progress.HasDirPathFailedDeletion(path) {
span.AddEvent("skipping folder deletion: child resource deletions failed")
continue
}
// if we can no longer find the folder in git, then we can delete it from grafana
_, err := readerRepo.Read(ctx, path, "")
if err != nil && (errors.Is(err, repository.ErrFileNotFound) || apierrors.IsNotFound(err)) {

View File

@@ -0,0 +1,623 @@
package sync
import (
"context"
"fmt"
"testing"
"github.com/prometheus/client_golang/prometheus"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"k8s.io/apimachinery/pkg/runtime/schema"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/jobs"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
)
/*
TestIncrementalSync_HierarchicalErrorHandling tests the hierarchical error handling behavior:
FOLDER CREATION FAILURES:
- When EnsureFolderPathExist fails with PathCreationError, the path is tracked
- Subsequent resources under that path are skipped with FileActionIgnored
- Only the initial folder creation error counts toward error limits
- WriteResourceFromFile can also return PathCreationError for implicit folder creation
FOLDER DELETION FAILURES (cleanupOrphanedFolders):
- When RemoveResourceFromFile fails, path is tracked in failedDeletions
- In cleanupOrphanedFolders, HasDirPathFailedDeletion() is checked before RemoveFolder
- If children failed to delete, folder cleanup is skipped with a span event
DELETIONS NOT AFFECTED BY CREATION FAILURES:
- HasDirPathFailedCreation is NOT checked for FileActionDeleted
- Deletions proceed even if their parent folder failed to be created
- This handles cleanup of resources that exist from previous syncs
RENAME OPERATIONS:
- RenameResourceFile can return PathCreationError for the destination folder
- Renames are affected by failed destination folder creation
- Renames are NOT skipped due to source folder creation failures
AUTOMATIC TRACKING:
- Record() automatically detects PathCreationError via errors.As() and adds to failedCreations
- Record() automatically detects FileActionDeleted with error and adds to failedDeletions
- No manual tracking calls needed
*/
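// For reference, the fixtures below only rely on PathCreationError exposing a Path
// and a wrapped Err; a minimal sketch of such a type (the Error and Unwrap methods
// are assumptions, not the real definitions) looks like:
//
//	type PathCreationError struct {
//	    Path string // folder path that could not be created, e.g. "folder1/"
//	    Err  error  // underlying cause
//	}
//
//	func (e *PathCreationError) Error() string {
//	    return fmt.Sprintf("creating folder %q: %v", e.Path, e.Err)
//	}
//
//	func (e *PathCreationError) Unwrap() error { return e.Err }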
func TestIncrementalSync_HierarchicalErrorHandling(t *testing.T) { //nolint:gocyclo
tests := []struct {
name string
setupMocks func(*repository.MockVersioned, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder)
changes []repository.VersionedFileChange
previousRef string
currentRef string
description string
expectError bool
errorContains string
}{
{
name: "folder creation fails, nested file skipped",
description: "When unsupported/ fails to create via EnsureFolderPathExist, nested file is skipped",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "unsupported/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "unsupported/nested/file2.txt", Ref: "new-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// First file triggers folder creation which fails
progress.On("HasDirPathFailedCreation", "unsupported/file.txt").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "unsupported/", Err: fmt.Errorf("permission denied")}
repoResources.On("EnsureFolderPathExist", mock.Anything, "unsupported/").Return("", folderErr).Once()
// First file recorded with error (note: error is from folder creation, but recorded against file)
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "unsupported/file.txt" &&
r.Action == repository.FileActionIgnored &&
r.Error != nil
})).Return().Once()
// Second file is skipped because parent folder failed
progress.On("HasDirPathFailedCreation", "unsupported/nested/file2.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "unsupported/nested/file2.txt" &&
r.Action == repository.FileActionIgnored &&
r.Warning != nil &&
r.Warning.Error() == "skipped: parent folder creation failed"
})).Return().Once()
},
},
{
name: "WriteResourceFromFile returns PathCreationError, nested resources skipped",
description: "When WriteResourceFromFile implicitly creates a folder and fails, nested resources are skipped",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "folder1/file1.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "folder1/file2.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "folder1/nested/file3.json", Ref: "new-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// First file write fails with PathCreationError
progress.On("HasDirPathFailedCreation", "folder1/file1.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/file1.json", "new-ref").
Return("", schema.GroupVersionKind{}, folderErr).Once()
// First file recorded with error, automatically tracked
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" &&
r.Action == repository.FileActionCreated &&
r.Error != nil
})).Return().Once()
// Subsequent files are skipped
progress.On("HasDirPathFailedCreation", "folder1/file2.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file2.json" && r.Action == repository.FileActionIgnored && r.Warning != nil
})).Return().Once()
progress.On("HasDirPathFailedCreation", "folder1/nested/file3.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/nested/file3.json" && r.Action == repository.FileActionIgnored && r.Warning != nil
})).Return().Once()
},
},
{
name: "file deletion fails, folder cleanup skipped",
description: "When RemoveResourceFromFile fails, cleanupOrphanedFolders skips folder removal",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionDeleted, Path: "dashboards/file1.json", PreviousRef: "old-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// File deletion fails (deletions don't check HasDirPathFailedCreation)
repoResources.On("RemoveResourceFromFile", mock.Anything, "dashboards/file1.json", "old-ref").
Return("dashboard-1", "folder-uid", schema.GroupVersionKind{Kind: "Dashboard"}, fmt.Errorf("permission denied")).Once()
// Error recorded, automatically tracked in failedDeletions
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "dashboards/file1.json" &&
r.Action == repository.FileActionDeleted &&
r.Error != nil
})).Return().Once()
// During cleanup, folder deletion is skipped
progress.On("HasDirPathFailedDeletion", "dashboards/").Return(true).Once()
// Note: RemoveFolder should NOT be called (verified via AssertNotCalled in test)
},
},
{
name: "deletion proceeds despite creation failure",
description: "When folder1/ creation fails, deletion of folder1/old.json still proceeds",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "folder1/new.json", Ref: "new-ref"},
{Action: repository.FileActionDeleted, Path: "folder1/old.json", PreviousRef: "old-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// Creation fails
progress.On("HasDirPathFailedCreation", "folder1/new.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/new.json", "new-ref").
Return("", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/new.json" && r.Error != nil
})).Return().Once()
// Deletion proceeds (NOT checking HasDirPathFailedCreation for deletions)
repoResources.On("RemoveResourceFromFile", mock.Anything, "folder1/old.json", "old-ref").
Return("old-resource", "", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/old.json" &&
r.Action == repository.FileActionDeleted &&
r.Error == nil // Deletion succeeds!
})).Return().Once()
},
},
{
name: "multi-level nesting cascade",
description: "When level1/ fails, level1/level2/level3/file.json is also skipped",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "level1/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "level1/level2/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "level1/level2/level3/file.txt", Ref: "new-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// First file triggers level1/ failure
progress.On("HasDirPathFailedCreation", "level1/file.txt").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "level1/", Err: fmt.Errorf("permission denied")}
repoResources.On("EnsureFolderPathExist", mock.Anything, "level1/").Return("", folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "level1/file.txt" && r.Action == repository.FileActionIgnored
})).Return().Once()
// All nested files are skipped
for _, path := range []string{"level1/level2/file.txt", "level1/level2/level3/file.txt"} {
progress.On("HasDirPathFailedCreation", path).Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == path && r.Action == repository.FileActionIgnored
})).Return().Once()
}
},
},
{
name: "mixed success and failure",
description: "When success/ works and failure/ fails, only failure/* are skipped",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "success/file1.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "success/nested/file2.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "failure/file3.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "failure/nested/file4.txt", Ref: "new-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// Success path works
progress.On("HasDirPathFailedCreation", "success/file1.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "success/file1.json", "new-ref").
Return("resource-1", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "success/file1.json" && r.Error == nil
})).Return().Once()
progress.On("HasDirPathFailedCreation", "success/nested/file2.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "success/nested/file2.json", "new-ref").
Return("resource-2", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "success/nested/file2.json" && r.Error == nil
})).Return().Once()
// Failure path fails
progress.On("HasDirPathFailedCreation", "failure/file3.txt").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "failure/", Err: fmt.Errorf("disk full")}
repoResources.On("EnsureFolderPathExist", mock.Anything, "failure/").Return("", folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/file3.txt" && r.Action == repository.FileActionIgnored
})).Return().Once()
// Nested file in failure path is skipped
progress.On("HasDirPathFailedCreation", "failure/nested/file4.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/nested/file4.txt" && r.Action == repository.FileActionIgnored
})).Return().Once()
},
},
{
name: "rename with failed destination folder",
description: "When RenameResourceFile fails with a PathCreationError for the destination folder, the failure is recorded and the destination path is tracked",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{
Action: repository.FileActionRenamed,
Path: "newfolder/file.json",
PreviousPath: "oldfolder/file.json",
Ref: "new-ref",
PreviousRef: "old-ref",
},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// Rename fails with PathCreationError for destination folder
progress.On("HasDirPathFailedCreation", "newfolder/file.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "newfolder/", Err: fmt.Errorf("permission denied")}
repoResources.On("RenameResourceFile", mock.Anything, "oldfolder/file.json", "old-ref", "newfolder/file.json", "new-ref").
Return("", "", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "newfolder/file.json" &&
r.Action == repository.FileActionRenamed &&
r.Error != nil
})).Return().Once()
},
},
{
name: "renamed file still checked, subsequent nested resources skipped",
description: "When the destination folder folder1/ has a recorded creation failure, the rename into it is skipped and so are other folder1/* files",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionRenamed, Path: "folder1/file1.json", PreviousPath: "old/file1.json", Ref: "new-ref", PreviousRef: "old-ref"},
{Action: repository.FileActionCreated, Path: "folder1/file2.json", Ref: "new-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// The rename is skipped because its destination path falls under a failed folder creation (it is the destination, not the source, that gets checked)
progress.On("HasDirPathFailedCreation", "folder1/file1.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" &&
r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "skipped: parent folder creation failed"
})).Return().Once()
// Second file also skipped
progress.On("HasDirPathFailedCreation", "folder1/file2.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file2.json" && r.Action == repository.FileActionIgnored && r.Warning != nil
})).Return().Once()
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
runHierarchicalErrorHandlingTest(t, tt)
})
}
}
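// compositeRepoForTest combines the versioned and reader mocks so that
// cleanupOrphanedFolders, which reads paths back from the repository, can run
// against the same repository value used for CompareFiles.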
type compositeRepoForTest struct {
*repository.MockVersioned
*repository.MockReader
}
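// runHierarchicalErrorHandlingTest wires up the repository, resources and progress
// mocks for a single table entry and runs IncrementalSync against them.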
func runHierarchicalErrorHandlingTest(t *testing.T, tt struct {
name string
setupMocks func(*repository.MockVersioned, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder)
changes []repository.VersionedFileChange
previousRef string
currentRef string
description string
expectError bool
errorContains string
}) {
var repo repository.Versioned
mockVersioned := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
// For tests that need cleanup (folder deletion), use composite repo
if tt.name == "file deletion fails, folder cleanup skipped" {
mockReader := repository.NewMockReader(t)
repo = &compositeRepoForTest{
MockVersioned: mockVersioned,
MockReader: mockReader,
}
} else {
repo = mockVersioned
}
mockVersioned.On("CompareFiles", mock.Anything, tt.previousRef, tt.currentRef).Return(tt.changes, nil)
progress.On("SetTotal", mock.Anything, len(tt.changes)).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
tt.setupMocks(mockVersioned, repoResources, progress)
err := IncrementalSync(context.Background(), repo, tt.previousRef, tt.currentRef, repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
if tt.expectError {
require.Error(t, err)
if tt.errorContains != "" {
require.Contains(t, err.Error(), tt.errorContains)
}
} else {
require.NoError(t, err)
}
progress.AssertExpectations(t)
repoResources.AssertExpectations(t)
// For deletion tests, verify RemoveFolder was NOT called
if tt.name == "file deletion fails, folder cleanup skipped" {
repoResources.AssertNotCalled(t, "RemoveFolder", mock.Anything, mock.Anything)
}
}
// TestIncrementalSync_HierarchicalErrorHandling_FailedFolderCreation tests nested resource skipping
func TestIncrementalSync_HierarchicalErrorHandling_FailedFolderCreation(t *testing.T) {
repo := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "unsupported/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "unsupported/subfolder/file2.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "unsupported/file3.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "other/file.json", Ref: "new-ref"},
}
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 4).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
folderErr := &resources.PathCreationError{Path: "unsupported/", Err: fmt.Errorf("permission denied")}
// The first file is checked before any failure has been recorded.
progress.On("HasDirPathFailedCreation", "unsupported/file.txt").Return(false).Once()
repoResources.On("EnsureFolderPathExist", mock.Anything, "unsupported/").Return("", folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "unsupported/file.txt" && r.Action == repository.FileActionIgnored && r.Error != nil
})).Return().Once()
progress.On("HasDirPathFailedCreation", "unsupported/subfolder/file2.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "unsupported/subfolder/file2.txt" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "skipped: parent folder creation failed"
})).Return().Once()
progress.On("HasDirPathFailedCreation", "unsupported/file3.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "unsupported/file3.json" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "skipped: parent folder creation failed"
})).Return().Once()
progress.On("HasDirPathFailedCreation", "other/file.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "other/file.json", "new-ref").
Return("test-resource", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "other/file.json" && r.Action == repository.FileActionCreated && r.Error == nil
})).Return().Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
}
// TestIncrementalSync_HierarchicalErrorHandling_FailedFileDeletion tests folder cleanup prevention
func TestIncrementalSync_HierarchicalErrorHandling_FailedFileDeletion(t *testing.T) {
mockVersioned := repository.NewMockVersioned(t)
mockReader := repository.NewMockReader(t)
repo := &compositeRepoForTest{MockVersioned: mockVersioned, MockReader: mockReader}
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionDeleted, Path: "dashboards/file1.json", PreviousRef: "old-ref"},
}
mockVersioned.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 1).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
// Deletions don't check HasDirPathFailedCreation, they go straight to removal
repoResources.On("RemoveResourceFromFile", mock.Anything, "dashboards/file1.json", "old-ref").
Return("dashboard-1", "folder-uid", schema.GroupVersionKind{Kind: "Dashboard"}, fmt.Errorf("permission denied")).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "dashboards/file1.json" && r.Action == repository.FileActionDeleted &&
r.Error != nil && r.Error.Error() == "removing resource from file dashboards/file1.json: permission denied"
})).Return().Once()
progress.On("HasDirPathFailedDeletion", "dashboards/").Return(true).Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
repoResources.AssertNotCalled(t, "RemoveFolder", mock.Anything, mock.Anything)
}
// TestIncrementalSync_HierarchicalErrorHandling_DeletionNotAffectedByCreationFailure tests deletions proceed despite creation failures
func TestIncrementalSync_HierarchicalErrorHandling_DeletionNotAffectedByCreationFailure(t *testing.T) {
repo := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "folder1/file.json", Ref: "new-ref"},
{Action: repository.FileActionDeleted, Path: "folder1/old.json", PreviousRef: "old-ref"},
}
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 2).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
// Creation fails
progress.On("HasDirPathFailedCreation", "folder1/file.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/file.json", "new-ref").
Return("", schema.GroupVersionKind{}, &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file.json" && r.Error != nil
})).Return().Once()
// The deletion should NOT be skipped: deletions don't check HasDirPathFailedCreation and go straight to removal
repoResources.On("RemoveResourceFromFile", mock.Anything, "folder1/old.json", "old-ref").
Return("old-resource", "", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/old.json" && r.Action == repository.FileActionDeleted && r.Error == nil
})).Return().Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
}
// TestIncrementalSync_HierarchicalErrorHandling_MultiLevelNesting tests multi-level cascade
func TestIncrementalSync_HierarchicalErrorHandling_MultiLevelNesting(t *testing.T) {
repo := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "level1/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "level1/level2/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "level1/level2/level3/file.txt", Ref: "new-ref"},
}
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 3).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
folderErr := &resources.PathCreationError{Path: "level1/", Err: fmt.Errorf("permission denied")}
// The first file is checked before any failure has been recorded.
progress.On("HasDirPathFailedCreation", "level1/file.txt").Return(false).Once()
repoResources.On("EnsureFolderPathExist", mock.Anything, "level1/").Return("", folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "level1/file.txt" && r.Action == repository.FileActionIgnored && r.Error != nil
})).Return().Once()
progress.On("HasDirPathFailedCreation", "level1/level2/file.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "level1/level2/file.txt" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "skipped: parent folder creation failed"
})).Return().Once()
progress.On("HasDirPathFailedCreation", "level1/level2/level3/file.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "level1/level2/level3/file.txt" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "skipped: parent folder creation failed"
})).Return().Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
}
// TestIncrementalSync_HierarchicalErrorHandling_MixedSuccessAndFailure tests partial failures
func TestIncrementalSync_HierarchicalErrorHandling_MixedSuccessAndFailure(t *testing.T) {
repo := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "success/file1.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "success/nested/file2.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "failure/file3.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "failure/nested/file4.txt", Ref: "new-ref"},
}
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 4).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
progress.On("HasDirPathFailedCreation", "success/file1.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "success/file1.json", "new-ref").
Return("resource-1", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "success/file1.json" && r.Action == repository.FileActionCreated && r.Error == nil
})).Return().Once()
progress.On("HasDirPathFailedCreation", "success/nested/file2.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "success/nested/file2.json", "new-ref").
Return("resource-2", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "success/nested/file2.json" && r.Action == repository.FileActionCreated && r.Error == nil
})).Return().Once()
folderErr := &resources.PathCreationError{Path: "failure/", Err: fmt.Errorf("disk full")}
progress.On("HasDirPathFailedCreation", "failure/file3.txt").Return(false).Once()
repoResources.On("EnsureFolderPathExist", mock.Anything, "failure/").Return("", folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/file3.txt" && r.Action == repository.FileActionIgnored
})).Return().Once()
progress.On("HasDirPathFailedCreation", "failure/nested/file4.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/nested/file4.txt" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "skipped: parent folder creation failed"
})).Return().Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
repoResources.AssertExpectations(t)
}
// TestIncrementalSync_HierarchicalErrorHandling_RenameWithFailedFolderCreation tests rename operations affected by folder failures
func TestIncrementalSync_HierarchicalErrorHandling_RenameWithFailedFolderCreation(t *testing.T) {
repo := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionRenamed, Path: "newfolder/file.json", PreviousPath: "oldfolder/file.json", Ref: "new-ref", PreviousRef: "old-ref"},
}
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 1).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
progress.On("HasDirPathFailedCreation", "newfolder/file.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "newfolder/file.json" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "skipped: parent folder creation failed"
})).Return().Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
}
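Taken together, these tests pin down how the incremental sync loop is expected to branch on hierarchical failures. A minimal sketch of that dispatch, assuming the JobProgressRecorder and RepositoryResources interfaces mocked above (the applyChange name and overall shape are illustrative, not the actual implementation):

// Sketch only: how a single change could be dispatched, given the behaviour asserted above.
func applyChange(ctx context.Context, change repository.VersionedFileChange,
	repoResources resources.RepositoryResources, progress jobs.JobProgressRecorder) {
	// Deletions are never skipped: the resource may exist from a previous sync
	// even if its parent folder failed to be created in this run.
	if change.Action != repository.FileActionDeleted && progress.HasDirPathFailedCreation(change.Path) {
		progress.Record(ctx, jobs.JobResourceResult{
			Path:    change.Path,
			Action:  repository.FileActionIgnored,
			Warning: fmt.Errorf("skipped: parent folder creation failed"),
		})
		return
	}
	// ... otherwise write, rename or remove the resource and Record() the outcome;
	// Record() is what tracks any PathCreationError for later HasDirPathFailedCreation checks.
}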

View File

@@ -92,6 +92,10 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation checks
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
progress.On("HasDirPathFailedCreation", "alerts/alert.yaml").Return(false)
// Mock successful resource writes
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "new-ref").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
@@ -127,6 +131,9 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", "unsupported/path/file.txt").Return(false)
// Mock folder creation
repoResources.On("EnsureFolderPathExist", mock.Anything, "unsupported/path/").
Return("test-folder", nil)
@@ -161,6 +168,9 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", ".unsupported/path/file.txt").Return(false)
progress.On("Record", mock.Anything, jobs.JobResourceResult{
Action: repository.FileActionIgnored,
Path: ".unsupported/path/file.txt",
@@ -222,6 +232,9 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", "dashboards/new.json").Return(false)
// Mock resource rename
repoResources.On("RenameResourceFile", mock.Anything, "dashboards/old.json", "old-ref", "dashboards/new.json", "new-ref").
Return("renamed-dashboard", "", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
@@ -254,6 +267,10 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetTotal", mock.Anything, 1).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", "dashboards/ignored.json").Return(false)
progress.On("Record", mock.Anything, jobs.JobResourceResult{
Action: repository.FileActionIgnored,
Path: "dashboards/ignored.json",
@@ -278,6 +295,9 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetTotal", mock.Anything, 1).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", "unsupported/path/file.txt").Return(false)
// Mock folder creation error
repoResources.On("EnsureFolderPathExist", mock.Anything, "unsupported/path/").
Return("", fmt.Errorf("failed to create folder"))
@@ -303,6 +323,9 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
// Mock resource write error
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "new-ref").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, fmt.Errorf("write failed"))
@@ -372,7 +395,8 @@ func TestIncrementalSync(t *testing.T) {
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 1).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
// Mock too many errors
// Mock too many errors - this is checked before processing files, so HasDirPathFailedCreation won't be called
progress.On("TooManyErrors").Return(fmt.Errorf("too many errors occurred"))
},
previousRef: "old-ref",
@@ -428,6 +452,9 @@ func TestIncrementalSync_CleanupOrphanedFolders(t *testing.T) {
repoResources.On("RemoveResourceFromFile", mock.Anything, "dashboards/old.json", "old-ref").
Return("old-dashboard", "folder-uid", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
// Mock HasDirPathFailedDeletion check for cleanup
progress.On("HasDirPathFailedDeletion", "dashboards/").Return(false)
// if the folder is not found in git, there should be a call to remove the folder from grafana
repo.MockReader.On("Read", mock.Anything, "dashboards/", "").
Return((*repository.FileInfo)(nil), repository.ErrFileNotFound)
@@ -453,6 +480,10 @@ func TestIncrementalSync_CleanupOrphanedFolders(t *testing.T) {
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
repoResources.On("RemoveResourceFromFile", mock.Anything, "dashboards/old.json", "old-ref").
Return("old-dashboard", "folder-uid", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
// Mock HasDirPathFailedDeletion check for cleanup
progress.On("HasDirPathFailedDeletion", "dashboards/").Return(false)
// if the folder still exists in git, there should not be a call to delete it from grafana
repo.MockReader.On("Read", mock.Anything, "dashboards/", "").
Return(&repository.FileInfo{}, nil)
@@ -485,6 +516,13 @@ func TestIncrementalSync_CleanupOrphanedFolders(t *testing.T) {
repoResources.On("RemoveResourceFromFile", mock.Anything, "alerts/old-alert.yaml", "old-ref").
Return("old-alert", "folder-uid-2", schema.GroupVersionKind{Kind: "Alert", Group: "alerts"}, nil)
progress.On("Record", mock.Anything, mock.Anything).Return()
progress.On("TooManyErrors").Return(nil)
// Mock HasDirPathFailedDeletion checks for cleanup
progress.On("HasDirPathFailedDeletion", "dashboards/").Return(false)
progress.On("HasDirPathFailedDeletion", "alerts/").Return(false)
// both not found in git, both should be deleted
repo.MockReader.On("Read", mock.Anything, "dashboards/", "").
Return((*repository.FileInfo)(nil), repository.ErrFileNotFound)
@@ -492,9 +530,6 @@ func TestIncrementalSync_CleanupOrphanedFolders(t *testing.T) {
Return((*repository.FileInfo)(nil), repository.ErrFileNotFound)
repoResources.On("RemoveFolder", mock.Anything, "folder-uid-1").Return(nil)
repoResources.On("RemoveFolder", mock.Anything, "folder-uid-2").Return(nil)
progress.On("Record", mock.Anything, mock.Anything).Return()
progress.On("TooManyErrors").Return(nil)
},
},
}
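The cleanup mocks above imply the order of operations for orphaned-folder removal. A rough sketch under the same assumptions (the deletedFolders map of directory path to folder UID is a hypothetical stand-in for whatever the sync collects during the deletion pass):

// Sketch only: orphaned-folder cleanup as implied by the mocks above.
for dirPath, folderUID := range deletedFolders { // e.g. "dashboards/" -> "folder-uid-1"
	// Never delete a folder whose child deletions failed.
	if progress.HasDirPathFailedDeletion(dirPath) {
		continue
	}
	// Only remove the folder from Grafana when it no longer exists in git.
	if _, err := repo.Read(ctx, dirPath, ""); errors.Is(err, repository.ErrFileNotFound) {
		_ = repoResources.RemoveFolder(ctx, folderUID)
	}
}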

View File

@@ -20,6 +20,21 @@ import (
const MaxNumberOfFolders = 10000
// PathCreationError represents an error that occurred while creating a folder path.
// It contains the path that failed and the underlying error.
type PathCreationError struct {
Path string
Err error
}
func (e *PathCreationError) Unwrap() error {
return e.Err
}
func (e *PathCreationError) Error() string {
return fmt.Sprintf("failed to create path %s: %v", e.Path, e.Err)
}
type FolderManager struct {
repo repository.ReaderWriter
tree FolderTree
@@ -73,7 +88,11 @@ func (fm *FolderManager) EnsureFolderPathExist(ctx context.Context, filePath str
}
if err := fm.EnsureFolderExists(ctx, f, parent); err != nil {
return fmt.Errorf("ensure folder exists: %w", err)
// Wrap in PathCreationError to indicate which path failed
return &PathCreationError{
Path: f.Path,
Err: fmt.Errorf("ensure folder exists: %w", err),
}
}
fm.tree.Add(f, parent)
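Callers can unwrap the typed error to learn which directory failed. A minimal sketch, assuming the RepositoryResources signature used by the mocks above (the surrounding Record call is illustrative):

var pathErr *resources.PathCreationError
if _, err := repoResources.EnsureFolderPathExist(ctx, filePath); errors.As(err, &pathErr) {
	// pathErr.Path holds the directory that could not be created, e.g. "level1/".
	// Recording the failure lets later HasDirPathFailedCreation checks skip nested files.
	progress.Record(ctx, jobs.JobResourceResult{
		Path:   filePath,
		Action: repository.FileActionIgnored,
		Error:  err,
	})
}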

View File

@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"net/url"
"path"
"strings"
"time"
@@ -140,20 +141,14 @@ func (e *evaluator) evaluateFile(ctx context.Context, repo repository.Reader, ba
if info.Parsed.GVK.Kind == dashboardKind {
// FIXME: extract the logic out of a dashboard URL builder/injector or similar
// for testability and decoupling
urlBuilder, err := url.Parse(baseURL)
if err != nil {
info.Error = err.Error()
return info
}
if info.Parsed.Existing != nil {
grafanaURL := urlBuilder.JoinPath("d", obj.GetName(), slugify.Slugify(info.Title))
info.GrafanaURL = grafanaURL.String()
info.GrafanaURL = fmt.Sprintf("%sd/%s/%s", baseURL, obj.GetName(),
slugify.Slugify(info.Title))
}
// Load this file directly
previewURL := urlBuilder.JoinPath("admin/provisioning", info.Parsed.Repo.Name, "dashboard/preview", info.Parsed.Info.Path)
info.PreviewURL = previewURL.String()
info.PreviewURL = baseURL + path.Join("admin/provisioning",
info.Parsed.Repo.Name, "dashboard/preview", info.Parsed.Info.Path)
query := url.Values{}
query.Set("ref", info.Parsed.Info.Ref)

View File

@@ -737,78 +737,8 @@ func TestCalculateChanges(t *testing.T) {
Path: "path/to/file.json",
Ref: "ref",
},
Error: "parse \"ht tp://bad url/\": first path segment in URL cannot contain colon",
}},
},
},
{
name: "path with spaces",
setupMocks: func(parser *resources.MockParser, reader *repository.MockReader, progress *jobs.MockJobProgressRecorder, renderer *MockScreenshotRenderer, parserFactory *resources.MockParserFactory) {
finfo := &repository.FileInfo{
Path: "path/to/file with spaces.json",
Ref: "ref",
Data: []byte("xxxx"),
}
obj := &unstructured.Unstructured{
Object: map[string]interface{}{
"apiVersion": resources.DashboardResource.GroupVersion().String(),
"kind": dashboardKind,
"metadata": map[string]interface{}{
"name": "the-uid",
},
"spec": map[string]interface{}{
"title": "hello world",
},
},
}
meta, _ := utils.MetaAccessor(obj)
progress.On("SetMessage", mock.Anything, "process path/to/file with spaces.json").Return()
reader.On("Read", mock.Anything, "path/to/file with spaces.json", "ref").Return(finfo, nil)
reader.On("Config").Return(&provisioning.Repository{
ObjectMeta: metav1.ObjectMeta{
Name: "test-repo",
Namespace: "x",
},
Spec: provisioning.RepositorySpec{
GitHub: &provisioning.GitHubRepositoryConfig{
GenerateDashboardPreviews: true,
},
},
})
parser.On("Parse", mock.Anything, finfo).Return(&resources.ParsedResource{
Info: finfo,
Repo: provisioning.ResourceRepositoryInfo{
Namespace: "x",
Name: "y",
},
GVK: schema.GroupVersionKind{
Kind: dashboardKind,
},
Obj: obj,
Existing: obj,
Meta: meta,
DryRunResponse: obj,
}, nil)
renderer.On("IsAvailable", mock.Anything, mock.Anything).Return(false)
parserFactory.On("GetParser", mock.Anything, mock.Anything).Return(parser, nil)
},
changes: []repository.VersionedFileChange{{
Action: repository.FileActionCreated,
Path: "path/to/file with spaces.json",
Ref: "ref",
}},
expectedInfo: changeInfo{
Changes: []fileChangeInfo{{
Change: repository.VersionedFileChange{
Action: repository.FileActionCreated,
Path: "path/to/file with spaces.json",
Ref: "ref",
},
GrafanaURL: "http://host/d/the-uid/hello-world",
PreviewURL: "http://host/admin/provisioning/y/dashboard/preview/path/to/file%20with%20spaces.json?pull_request_url=http%253A%252F%252Fgithub.com%252Fpr%252F&ref=ref",
GrafanaScreenshotURL: "",
PreviewScreenshotURL: "",
GrafanaURL: "ht tp://bad url/d/the-uid/hello-world", // Malformed URL
PreviewURL: "ht tp://bad url/admin/provisioning/y/dashboard/preview/path/to/file.json?pull_request_url=http%253A%252F%252Fgithub.com%252Fpr%252F&ref=ref",
}},
},
},

View File

@@ -357,7 +357,7 @@ func (srv PrometheusSrv) RouteGetRuleStatuses(c *contextmodel.ReqContext) respon
type RuleStatusMutator func(source *ngmodels.AlertRule, toMutate *apimodels.AlertingRule)
// mutator function used to attach alert states to the rule and returns the totals and filtered totals
type RuleAlertStateMutator func(source *ngmodels.AlertRule, toMutate *apimodels.AlertingRule, stateFilterSet map[eval.State]struct{}, matchers labels.Matchers, labelOptions []ngmodels.LabelOption, limitAlerts int64) (total map[string]int64, filteredTotal map[string]int64)
type RuleAlertStateMutator func(source *ngmodels.AlertRule, toMutate *apimodels.AlertingRule, stateFilterSet map[eval.State]struct{}, matchers labels.Matchers, labelOptions []ngmodels.LabelOption) (total map[string]int64, filteredTotal map[string]int64)
func RuleStatusMutatorGenerator(statusReader StatusReader) RuleStatusMutator {
return func(source *ngmodels.AlertRule, toMutate *apimodels.AlertingRule) {
@@ -377,18 +377,32 @@ func RuleStatusMutatorGenerator(statusReader StatusReader) RuleStatusMutator {
}
func RuleAlertStateMutatorGenerator(manager state.AlertInstanceManager) RuleAlertStateMutator {
return func(source *ngmodels.AlertRule, toMutate *apimodels.AlertingRule, stateFilterSet map[eval.State]struct{}, matchers labels.Matchers, labelOptions []ngmodels.LabelOption, limitAlerts int64) (map[string]int64, map[string]int64) {
return func(source *ngmodels.AlertRule, toMutate *apimodels.AlertingRule, stateFilterSet map[eval.State]struct{}, matchers labels.Matchers, labelOptions []ngmodels.LabelOption) (map[string]int64, map[string]int64) {
states := manager.GetStatesForRuleUID(source.OrgID, source.UID)
totals := make(map[string]int64)
totalsFiltered := make(map[string]int64)
for _, alertState := range states {
activeAt := alertState.StartsAt
valString := ""
if alertState.State == eval.Alerting || alertState.State == eval.Pending || alertState.State == eval.Recovering {
valString = FormatValues(alertState)
}
stateKey := strings.ToLower(alertState.State.String())
totals[stateKey] += 1
// Do not add error twice when execution error state is Error
if alertState.Error != nil && source.ExecErrState != ngmodels.ErrorErrState {
totals["error"] += 1
}
alert := apimodels.Alert{
Labels: apimodels.LabelsFromMap(alertState.GetLabels(labelOptions...)),
Annotations: apimodels.LabelsFromMap(alertState.Annotations),
// TODO: or should we make this two fields? Using one field lets the
// frontend use the same logic for parsing text on annotations and this.
State: state.FormatStateAndReason(alertState.State, alertState.StateReason),
ActiveAt: &activeAt,
Value: valString,
}
// Set the state of the rule based on the state of its alerts.
// Only update the rule state with 'pending' or 'recovering' if the current state is 'inactive'.
@@ -428,23 +442,7 @@ func RuleAlertStateMutatorGenerator(manager state.AlertInstanceManager) RuleAler
totalsFiltered["error"] += 1
}
if limitAlerts != 0 {
valString := ""
if alertState.State == eval.Alerting || alertState.State == eval.Pending || alertState.State == eval.Recovering {
valString = FormatValues(alertState)
}
toMutate.Alerts = append(toMutate.Alerts, apimodels.Alert{
Labels: apimodels.LabelsFromMap(alertState.GetLabels(labelOptions...)),
Annotations: apimodels.LabelsFromMap(alertState.Annotations),
// TODO: or should we make this two fields? Using one field lets the
// frontend use the same logic for parsing text on annotations and this.
State: state.FormatStateAndReason(alertState.State, alertState.StateReason),
ActiveAt: &activeAt,
Value: valString,
})
}
toMutate.Alerts = append(toMutate.Alerts, alert)
}
return totals, totalsFiltered
}
@@ -1229,7 +1227,7 @@ func toRuleGroup(log log.Logger, groupKey ngmodels.AlertRuleGroupKey, folderFull
}
// mutate rule for alert states
totals, totalsFiltered := ruleAlertStateMutator(rule, &alertingRule, stateFilterSet, matchers, labelOptions, limitAlerts)
totals, totalsFiltered := ruleAlertStateMutator(rule, &alertingRule, stateFilterSet, matchers, labelOptions)
if alertingRule.State != "" {
rulesTotals[alertingRule.State] += 1

View File

@@ -39,8 +39,7 @@
"inputs": [
"{workspaceRoot}/scripts/cli/generateSassVariableFiles.ts",
"{workspaceRoot}/packages/grafana-data/src/themes/**",
"{workspaceRoot}/packages/grafana-ui/src/themes/**",
"{workspaceRoot}/package.json"
"{workspaceRoot}/packages/grafana-ui/src/themes/**"
],
"outputs": [
"{workspaceRoot}/public/sass/_variables.generated.scss",

View File

@@ -3,7 +3,7 @@ import { ComponentType, useEffect } from 'react';
import { connect, ConnectedProps } from 'react-redux';
import { GrafanaTheme2 } from '@grafana/data';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Trans, t } from '@grafana/i18n';
import { LinkButton, RadioButtonGroup, useStyles2, FilterInput, EmptyState } from '@grafana/ui';
import { Page } from 'app/core/components/Page/Page';

View File

@@ -3,7 +3,7 @@ import { useEffect } from 'react';
import { connect, ConnectedProps } from 'react-redux';
import { GrafanaTheme2 } from '@grafana/data';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { t } from '@grafana/i18n';
import { RadioButtonGroup, useStyles2, FilterInput } from '@grafana/ui';
import { Page } from 'app/core/components/Page/Page';

View File

@@ -1,7 +1,7 @@
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { GrafanaBootConfig } from '@grafana/runtime';
import config from 'app/core/config';

View File

@@ -1,7 +1,7 @@
import { css, cx } from '@emotion/css';
import { GrafanaTheme2 } from '@grafana/data';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Trans, t } from '@grafana/i18n';
import { Button, LoadingPlaceholder, Modal, ModalsController, useStyles2 } from '@grafana/ui';
import {

View File

@@ -1,4 +1,4 @@
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Trans, t } from '@grafana/i18n';
import { Icon, Stack, Tag, Tooltip } from '@grafana/ui';
import { Page } from 'app/core/components/Page/Page';

View File

@@ -22,7 +22,7 @@ export function initAlerting() {
component: ({ dashboard }) =>
alertingEnabled ? (
<Suspense fallback={null} key="alert-rules-button">
{dashboard && dashboard.uid && <AlertRulesToolbarButton dashboardUid={dashboard.uid} />}
{dashboard && <AlertRulesToolbarButton dashboardUid={dashboard.uid} />}
</Suspense>
) : null,
index: -2,

View File

@@ -76,6 +76,12 @@ export function DashboardEditPaneRenderer({ editPane, dashboard, isDocked }: Pro
data-testid={selectors.pages.Dashboard.Sidebar.optionsButton}
active={selectedObject === dashboard ? true : false}
/>
{/* <Sidebar.Button
tooltip={t('dashboard.sidebar.edit-schema.tooltip', 'Edit as code')}
title={t('dashboard.sidebar.edit-schema.title', 'Code')}
icon="brackets-curly"
onClick={() => dashboard.openV2SchemaEditor()}
/> */}
<Sidebar.Divider />
</>
)}

View File

@@ -16,7 +16,7 @@ import {
sceneUtils,
VizPanel,
} from '@grafana/scenes';
import { LibraryPanel } from '@grafana/schema';
import { LibraryPanel } from '@grafana/schema/';
import { Alert, Button, CodeEditor, Field, Select, useStyles2 } from '@grafana/ui';
import { isDashboardV2Spec } from 'app/features/dashboard/api/utils';
import { getPanelDataFrames } from 'app/features/dashboard/components/HelpWizard/utils';

View File

@@ -1,7 +1,7 @@
import { css, cx } from '@emotion/css';
import { memo, useMemo } from 'react';
import { GrafanaTheme2 } from '@grafana/data';
import { GrafanaTheme2 } from '@grafana/data/';
import { LazyLoader, SceneComponentProps, VizPanel } from '@grafana/scenes';
import { useStyles2 } from '@grafana/ui';

View File

@@ -1,4 +1,4 @@
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Trans } from '@grafana/i18n';
import { locationService } from '@grafana/runtime';
import { Button } from '@grafana/ui';

View File

@@ -1,7 +1,7 @@
import { css, cx } from '@emotion/css';
import { GrafanaTheme2 } from '@grafana/data';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Trans, t } from '@grafana/i18n';
import { useStyles2 } from '@grafana/ui';

View File

@@ -2,7 +2,7 @@ import { css } from '@emotion/css';
import { useForm } from 'react-hook-form';
import { GrafanaTheme2, TimeRange } from '@grafana/data';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Trans, t } from '@grafana/i18n';
import { Button, ClipboardButton, Field, Input, Stack, Label, ModalsController, Switch, useStyles2 } from '@grafana/ui';
import {

View File

@@ -1,7 +1,7 @@
import { UseFormRegister } from 'react-hook-form';
import { TimeRange } from '@grafana/data';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Trans, t } from '@grafana/i18n';
import { FieldSet, Label, Switch, TimeRangeInput, Stack } from '@grafana/ui';
import { DashboardInteractions } from 'app/features/dashboard-scene/utils/interactions';

View File

@@ -2,7 +2,7 @@ import { css } from '@emotion/css';
import { UseFormRegister } from 'react-hook-form';
import { GrafanaTheme2 } from '@grafana/data';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Trans, t } from '@grafana/i18n';
import { Checkbox, FieldSet, LinkButton, useStyles2, Stack } from '@grafana/ui';

View File

@@ -1,4 +1,4 @@
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Trans, t } from '@grafana/i18n';
import { Alert } from '@grafana/ui';

View File

@@ -2,7 +2,7 @@ import { css } from '@emotion/css';
import cx from 'classnames';
import { GrafanaTheme2 } from '@grafana/data';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Trans, t } from '@grafana/i18n';
import { Alert, useStyles2 } from '@grafana/ui';

View File

@@ -1,4 +1,4 @@
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Trans, t } from '@grafana/i18n';
import { Alert } from '@grafana/ui';

View File

@@ -4,7 +4,7 @@ import { http, HttpResponse } from 'msw';
import { setupServer } from 'msw/node';
import { BootData, DataQuery } from '@grafana/data';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { reportInteraction, setEchoSrv } from '@grafana/runtime';
import { Panel } from '@grafana/schema';
import config from 'app/core/config';

View File

@@ -5,7 +5,7 @@ import { useEffectOnce } from 'react-use';
import { Props as AutoSizerProps } from 'react-virtualized-auto-sizer';
import { render } from 'test/test-utils';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { Dashboard, DashboardCursorSync, FieldConfigSource, Panel, ThresholdsMode } from '@grafana/schema/src';
import { getRouteComponentProps } from 'app/core/navigation/mocks/routeProps';
import { DashboardInitPhase, DashboardMeta, DashboardRoutes } from 'app/types/dashboard';

View File

@@ -4,7 +4,7 @@ import { useLocation, useParams } from 'react-router-dom-v5-compat';
import { usePrevious } from 'react-use';
import { GrafanaTheme2, PageLayoutType, TimeZone } from '@grafana/data';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { PageToolbar, useStyles2 } from '@grafana/ui';
import { Page } from 'app/core/components/Page/Page';
import { useGrafana } from 'app/core/context/GrafanaContext';

View File

@@ -3,7 +3,7 @@ import { useCopyToClipboard } from 'react-use';
import { Field, GrafanaTheme2 } from '@grafana/data';
import { t } from '@grafana/i18n';
import { isValidLegacyName, utf8Support } from '@grafana/prometheus';
import { isValidLegacyName, utf8Support } from '@grafana/prometheus/src/utf8_support';
import { reportInteraction } from '@grafana/runtime';
import { IconButton, useStyles2 } from '@grafana/ui';

View File

@@ -0,0 +1,262 @@
import { render, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { useState } from 'react';
import { DEFAULT_SPAN_FILTERS } from 'app/features/explore/state/constants';
import { Trace } from '../../types/trace';
import { SpanFilters } from './SpanFilters';
const trace: Trace = {
traceID: '1ed38015486087ca',
spans: [
{
traceID: '1ed38015486087ca',
spanID: '1ed38015486087ca',
operationName: 'Span0',
tags: [{ key: 'TagKey0', type: 'string', value: 'TagValue0' }],
kind: 'server',
statusCode: 2,
statusMessage: 'message',
instrumentationLibraryName: 'name',
instrumentationLibraryVersion: 'version',
traceState: 'state',
process: {
serviceName: 'Service0',
tags: [{ key: 'ProcessKey0', type: 'string', value: 'ProcessValue0' }],
},
logs: [{ fields: [{ key: 'LogKey0', type: 'string', value: 'LogValue0' }] }],
},
{
traceID: '1ed38015486087ca',
spanID: '2ed38015486087ca',
operationName: 'Span1',
tags: [{ key: 'TagKey1', type: 'string', value: 'TagValue1' }],
process: {
serviceName: 'Service1',
tags: [{ key: 'ProcessKey1', type: 'string', value: 'ProcessValue1' }],
},
logs: [{ fields: [{ key: 'LogKey1', type: 'string', value: 'LogValue1' }] }],
},
],
processes: {
'1ed38015486087ca': {
serviceName: 'Service0',
tags: [],
},
},
} as unknown as Trace;
describe('SpanFilters', () => {
let user: ReturnType<typeof userEvent.setup>;
const SpanFiltersWithProps = ({ showFilters = true, matches }: { showFilters?: boolean; matches?: Set<string> }) => {
const [search, setSearch] = useState(DEFAULT_SPAN_FILTERS);
const props = {
trace: trace,
showSpanFilters: showFilters,
setShowSpanFilters: jest.fn(),
search,
setSearch,
spanFilterMatches: matches,
setFocusedSpanIdForSearch: jest.fn(),
datasourceType: 'tempo',
};
return <SpanFilters {...props} />;
};
beforeEach(() => {
jest.useFakeTimers();
// Need to use delay: null here to work with fakeTimers
// see https://github.com/testing-library/user-event/issues/833
user = userEvent.setup({ delay: null });
});
afterEach(() => {
jest.useRealTimers();
});
it('should render', () => {
expect(() => render(<SpanFiltersWithProps />)).not.toThrow();
});
it('should render filters', async () => {
render(<SpanFiltersWithProps />);
const serviceOperator = screen.getByLabelText('Select service name operator');
const serviceValue = screen.getByLabelText('Select service name');
const spanOperator = screen.getByLabelText('Select span name operator');
const spanValue = screen.getByLabelText('Select span name');
const fromOperator = screen.getByLabelText('Select min span operator');
const fromValue = screen.getByLabelText('Select min span duration');
const toOperator = screen.getByLabelText('Select max span operator');
const toValue = screen.getByLabelText('Select max span duration');
const tagKey = screen.getByLabelText('Select tag key');
const tagOperator = screen.getByLabelText('Select tag operator');
const tagSelectValue = screen.getByLabelText('Select tag value');
expect(serviceOperator).toBeInTheDocument();
expect(getElemText(serviceOperator)).toBe('=');
expect(serviceValue).toBeInTheDocument();
expect(spanOperator).toBeInTheDocument();
expect(getElemText(spanOperator)).toBe('=');
expect(spanValue).toBeInTheDocument();
expect(fromOperator).toBeInTheDocument();
expect(getElemText(fromOperator)).toBe('>');
expect(fromValue).toBeInTheDocument();
expect(toOperator).toBeInTheDocument();
expect(getElemText(toOperator)).toBe('<');
expect(toValue).toBeInTheDocument();
expect(tagKey).toBeInTheDocument();
expect(tagOperator).toBeInTheDocument();
expect(getElemText(tagOperator)).toBe('=');
expect(tagSelectValue).toBeInTheDocument();
await user.click(serviceValue);
jest.advanceTimersByTime(1000);
await waitFor(() => {
expect(screen.getByText('Service0')).toBeInTheDocument();
expect(screen.getByText('Service1')).toBeInTheDocument();
});
await user.click(spanValue);
jest.advanceTimersByTime(1000);
await waitFor(() => {
expect(screen.getByText('Span0')).toBeInTheDocument();
expect(screen.getByText('Span1')).toBeInTheDocument();
});
await user.click(tagOperator);
jest.advanceTimersByTime(1000);
await waitFor(() => {
expect(screen.getByText('!=')).toBeInTheDocument();
expect(screen.getByText('=~')).toBeInTheDocument();
expect(screen.getByText('!~')).toBeInTheDocument();
});
await user.click(tagKey);
jest.advanceTimersByTime(1000);
await waitFor(() => {
expect(screen.getByText('TagKey0')).toBeInTheDocument();
expect(screen.getByText('TagKey1')).toBeInTheDocument();
expect(screen.getByText('kind')).toBeInTheDocument();
expect(screen.getByText('ProcessKey0')).toBeInTheDocument();
expect(screen.getByText('ProcessKey1')).toBeInTheDocument();
expect(screen.getByText('LogKey0')).toBeInTheDocument();
expect(screen.getByText('LogKey1')).toBeInTheDocument();
expect(screen.getByPlaceholderText('Find...')).toBeInTheDocument();
});
});
it('should update filters', async () => {
render(<SpanFiltersWithProps />);
const serviceValue = screen.getByLabelText('Select service name');
const spanValue = screen.getByLabelText('Select span name');
const tagKey = screen.getByLabelText('Select tag key');
const tagOperator = screen.getByLabelText('Select tag operator');
const tagValue = screen.getByLabelText('Select tag value');
expect(getElemText(serviceValue)).toBe('All service names');
await selectAndCheckValue(user, serviceValue, 'Service0');
expect(getElemText(spanValue)).toBe('All span names');
await selectAndCheckValue(user, spanValue, 'Span0');
await user.click(tagValue);
jest.advanceTimersByTime(1000);
await waitFor(() => expect(screen.getByText('No options found')).toBeInTheDocument());
expect(getElemText(tagKey)).toBe('Select tag');
await selectAndCheckValue(user, tagKey, 'TagKey0');
expect(getElemText(tagValue)).toBe('Select value');
await selectAndCheckValue(user, tagValue, 'TagValue0');
expect(screen.queryByLabelText('Input tag value')).toBeNull();
await selectAndCheckValue(user, tagOperator, '=~');
expect(screen.getByLabelText('Input tag value')).toBeInTheDocument();
});
it('should order tag filters', async () => {
render(<SpanFiltersWithProps />);
const tagKey = screen.getByLabelText('Select tag key');
await user.click(tagKey);
jest.advanceTimersByTime(1000);
await waitFor(() => {
const container = screen.getByText('TagKey0').parentElement?.parentElement?.parentElement;
expect(container?.childNodes[1].textContent).toBe('ProcessKey0');
expect(container?.childNodes[2].textContent).toBe('ProcessKey1');
expect(container?.childNodes[3].textContent).toBe('TagKey0');
expect(container?.childNodes[4].textContent).toBe('TagKey1');
expect(container?.childNodes[5].textContent).toBe('id');
expect(container?.childNodes[6].textContent).toBe('kind');
expect(container?.childNodes[7].textContent).toBe('library.name');
expect(container?.childNodes[8].textContent).toBe('library.version');
expect(container?.childNodes[9].textContent).toBe('status');
expect(container?.childNodes[10].textContent).toBe('status.message');
expect(container?.childNodes[11].textContent).toBe('trace.state');
expect(container?.childNodes[12].textContent).toBe('LogKey0');
expect(container?.childNodes[13].textContent).toBe('LogKey1');
});
});
it('should only show add/remove tag when necessary', async () => {
render(<SpanFiltersWithProps />);
expect(screen.queryAllByLabelText('Add tag').length).toBe(0); // not filled in the default tag, so no need to add another one
expect(screen.queryAllByLabelText('Remove tag').length).toBe(0); // not filled in the default tag, so no values to remove
expect(screen.getAllByLabelText('Select tag key').length).toBe(1);
await selectAndCheckValue(user, screen.getByLabelText('Select tag key'), 'TagKey0');
expect(screen.getAllByLabelText('Add tag').length).toBe(1);
expect(screen.getAllByLabelText('Remove tag').length).toBe(1);
await user.click(screen.getByLabelText('Add tag'));
jest.advanceTimersByTime(1000);
expect(screen.queryAllByLabelText('Add tag').length).toBe(0); // not filled in the new tag, so no need to add another one
expect(screen.getAllByLabelText('Remove tag').length).toBe(2); // one for each tag
expect(screen.getAllByLabelText('Select tag key').length).toBe(2);
await user.click(screen.getAllByLabelText('Remove tag')[1]);
jest.advanceTimersByTime(1000);
expect(screen.queryAllByLabelText('Add tag').length).toBe(1); // filled in the default tag, so can add another one
expect(screen.queryAllByLabelText('Remove tag').length).toBe(1); // filled in the default tag, so can remove values
expect(screen.getAllByLabelText('Select tag key').length).toBe(1);
await user.click(screen.getAllByLabelText('Remove tag')[0]);
jest.advanceTimersByTime(1000);
expect(screen.queryAllByLabelText('Add tag').length).toBe(0); // not filled in the default tag, so no need to add another one
expect(screen.queryAllByLabelText('Remove tag').length).toBe(0); // not filled in the default tag, so no values to remove
expect(screen.getAllByLabelText('Select tag key').length).toBe(1);
});
it('should allow adding/removing tags', async () => {
render(<SpanFiltersWithProps />);
expect(screen.getAllByLabelText('Select tag key').length).toBe(1);
const tagKey = screen.getByLabelText('Select tag key');
await selectAndCheckValue(user, tagKey, 'TagKey0');
await user.click(screen.getByLabelText('Add tag'));
jest.advanceTimersByTime(1000);
expect(screen.getAllByLabelText('Select tag key').length).toBe(2);
await user.click(screen.getAllByLabelText('Remove tag')[0]);
jest.advanceTimersByTime(1000);
expect(screen.getAllByLabelText('Select tag key').length).toBe(1);
});
it('renders buttons when span filters is collapsed', async () => {
render(<SpanFiltersWithProps showFilters={false} />);
expect(screen.queryByRole('button', { name: 'Next result button' })).toBeInTheDocument();
expect(screen.queryByRole('button', { name: 'Prev result button' })).toBeInTheDocument();
});
});
const selectAndCheckValue = async (user: ReturnType<typeof userEvent.setup>, elem: HTMLElement, text: string) => {
await user.click(elem);
jest.advanceTimersByTime(1000);
await waitFor(() => expect(screen.getByText(text)).toBeInTheDocument());
await user.click(screen.getByText(text));
jest.advanceTimersByTime(1000);
expect(screen.getByText(text)).toBeInTheDocument();
};
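// Reads the value text rendered beside a Select's input element (assumes the current DOM layout of grafana-ui's Select).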
const getElemText = (elem: HTMLElement) => {
return elem.parentElement?.previousSibling?.textContent;
};

View File

@@ -0,0 +1,319 @@
// Copyright (c) 2017 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { css } from '@emotion/css';
import React, { useState, useEffect, memo, useCallback, useRef } from 'react';
import { GrafanaTheme2, TraceSearchProps, SelectableValue, toOption } from '@grafana/data';
import { Trans, t } from '@grafana/i18n';
import { IntervalInput } from '@grafana/o11y-ds-frontend';
import { Collapse, Icon, InlineField, InlineFieldRow, Select, Stack, Tooltip, useStyles2 } from '@grafana/ui';
import { DEFAULT_SPAN_FILTERS } from '../../../../state/constants';
import { getTraceServiceNames, getTraceSpanNames } from '../../../utils/tags';
import SearchBarInput from '../../common/SearchBarInput';
import { Trace } from '../../types/trace';
import NextPrevResult from '../SearchBar/NextPrevResult';
import TracePageSearchBar from '../SearchBar/TracePageSearchBar';
import { SpanFiltersTags } from './SpanFiltersTags';
export type SpanFilterProps = {
trace: Trace;
search: TraceSearchProps;
setSearch: (newSearch: TraceSearchProps) => void;
showSpanFilters: boolean;
setShowSpanFilters: (isOpen: boolean) => void;
setFocusedSpanIdForSearch: React.Dispatch<React.SetStateAction<string>>;
spanFilterMatches: Set<string> | undefined;
datasourceType: string;
};
export const SpanFilters = memo((props: SpanFilterProps) => {
const {
trace,
search,
setSearch,
showSpanFilters,
setShowSpanFilters,
setFocusedSpanIdForSearch,
spanFilterMatches,
datasourceType,
} = props;
const styles = { ...useStyles2(getStyles) };
const [serviceNames, setServiceNames] = useState<Array<SelectableValue<string>>>();
const [spanNames, setSpanNames] = useState<Array<SelectableValue<string>>>();
const [focusedSpanIndexForSearch, setFocusedSpanIndexForSearch] = useState(-1);
const [tagKeys, setTagKeys] = useState<Array<SelectableValue<string>>>();
const [tagValues, setTagValues] = useState<{ [key: string]: Array<SelectableValue<string>> }>({});
const prevTraceIdRef = useRef<string>();
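// Accepts durations such as "100ms" or "1.25s": digits, an optional decimal part, and a time unit.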
const durationRegex = /^\d+(?:\.\d)?\d*(?:ns|us|µs|ms|s|m|h)$/;
const clear = useCallback(() => {
setServiceNames(undefined);
setSpanNames(undefined);
setTagKeys(undefined);
setTagValues({});
setSearch(DEFAULT_SPAN_FILTERS);
}, [setSearch]);
useEffect(() => {
// Only clear filters when trace ID actually changes (not on initial mount)
const currentTraceId = trace?.traceID;
const traceHasChanged = prevTraceIdRef.current && prevTraceIdRef.current !== currentTraceId;
if (traceHasChanged) {
clear();
}
prevTraceIdRef.current = currentTraceId;
}, [clear, trace]);
const setShowSpanFilterMatchesOnly = useCallback(
(showMatchesOnly: boolean) => {
setSearch({ ...search, matchesOnly: showMatchesOnly });
},
[search, setSearch]
);
if (!trace) {
return null;
}
const setSpanFiltersSearch = (spanSearch: TraceSearchProps) => {
setFocusedSpanIndexForSearch(-1);
setFocusedSpanIdForSearch('');
setSearch(spanSearch);
};
const getServiceNames = () => {
if (!serviceNames) {
setServiceNames(getTraceServiceNames(trace).map(toOption));
}
};
const getSpanNames = () => {
if (!spanNames) {
setSpanNames(getTraceSpanNames(trace).map(toOption));
}
};
const collapseLabel = (
<>
<Tooltip
content={t(
'explore.span-filters.tooltip-collapse',
'Filter your spans below. You can continue to apply filters until you have narrowed down your resulting spans to the select few you are most interested in.'
)}
placement="right"
>
<span className={styles.collapseLabel}>
<Trans i18nKey="explore.span-filters.label-collapse">Span Filters</Trans>
<Icon size="md" name="info-circle" />
</span>
</Tooltip>
{!showSpanFilters && (
<div className={styles.nextPrevResult}>
<NextPrevResult
trace={trace}
spanFilterMatches={spanFilterMatches}
setFocusedSpanIdForSearch={setFocusedSpanIdForSearch}
focusedSpanIndexForSearch={focusedSpanIndexForSearch}
setFocusedSpanIndexForSearch={setFocusedSpanIndexForSearch}
datasourceType={datasourceType}
showSpanFilters={showSpanFilters}
/>
</div>
)}
</>
);
return (
<div className={styles.container}>
<Collapse label={collapseLabel} isOpen={showSpanFilters} onToggle={setShowSpanFilters}>
<InlineFieldRow className={styles.flexContainer}>
<InlineField label={t('explore.span-filters.label-service-name', 'Service name')} labelWidth={16}>
<Stack gap={0.5}>
<Select
aria-label={t(
'explore.span-filters.aria-label-select-service-name-operator',
'Select service name operator'
)}
onChange={(v) => setSpanFiltersSearch({ ...search, serviceNameOperator: v.value! })}
options={[toOption('='), toOption('!=')]}
value={search.serviceNameOperator}
/>
<Select
aria-label={t('explore.span-filters.aria-label-select-service-name', 'Select service name')}
isClearable
onChange={(v) => setSpanFiltersSearch({ ...search, serviceName: v?.value || '' })}
onOpenMenu={getServiceNames}
options={serviceNames || (search.serviceName ? [search.serviceName].map(toOption) : [])}
placeholder={t('explore.span-filters.placeholder-all-service-names', 'All service names')}
value={search.serviceName || null}
defaultValue={search.serviceName || null}
/>
</Stack>
</InlineField>
<SearchBarInput
onChange={(v) => {
setSpanFiltersSearch({ ...search, query: v, matchesOnly: v !== '' });
}}
value={search.query || ''}
/>
</InlineFieldRow>
<InlineFieldRow>
<InlineField label={t('explore.span-filters.label-span-name', 'Span name')} labelWidth={16}>
<Stack gap={0.5}>
<Select
aria-label={t('explore.span-filters.aria-label-select-span-name-operator', 'Select span name operator')}
onChange={(v) => setSpanFiltersSearch({ ...search, spanNameOperator: v.value! })}
options={[toOption('='), toOption('!=')]}
value={search.spanNameOperator}
/>
<Select
aria-label={t('explore.span-filters.aria-label-select-span-name', 'Select span name')}
isClearable
onChange={(v) => setSpanFiltersSearch({ ...search, spanName: v?.value || '' })}
onOpenMenu={getSpanNames}
options={spanNames || (search.spanName ? [search.spanName].map(toOption) : [])}
placeholder={t('explore.span-filters.placeholder-all-span-names', 'All span names')}
value={search.spanName || null}
/>
</Stack>
</InlineField>
</InlineFieldRow>
<InlineFieldRow>
<InlineField
label={t('explore.span-filters.label-duration', 'Duration')}
labelWidth={16}
tooltip={t('explore.span-filters.tooltip-duration', 'Filter by duration. Accepted units are {{units}}', {
units: 'ns, us, ms, s, m, h',
})}
>
<Stack alignItems="flex-start" gap={0.5}>
<Select
aria-label={t('explore.span-filters.aria-label-select-min-span-operator', 'Select min span operator')}
onChange={(v) => setSpanFiltersSearch({ ...search, fromOperator: v.value! })}
options={[toOption('>'), toOption('>=')]}
value={search.fromOperator}
/>
<div className={styles.intervalInput}>
<IntervalInput
ariaLabel={t('explore.span-filters.ariaLabel-select-min-span-duration', 'Select min span duration')}
onChange={(val) => setSpanFiltersSearch({ ...search, from: val })}
isInvalidError="Invalid duration"
// eslint-disable-next-line @grafana/i18n/no-untranslated-strings
placeholder="e.g. 100ms, 1.2s"
width={18}
value={search.from || ''}
validationRegex={durationRegex}
/>
</div>
<Select
aria-label={t('explore.span-filters.aria-label-select-max-span-operator', 'Select max span operator')}
onChange={(v) => setSpanFiltersSearch({ ...search, toOperator: v.value! })}
options={[toOption('<'), toOption('<=')]}
value={search.toOperator}
/>
<IntervalInput
ariaLabel={t('explore.span-filters.ariaLabel-select-max-span-duration', 'Select max span duration')}
onChange={(val) => setSpanFiltersSearch({ ...search, to: val })}
isInvalidError="Invalid duration"
// eslint-disable-next-line @grafana/i18n/no-untranslated-strings
placeholder="e.g. 100ms, 1.2s"
width={18}
value={search.to || ''}
validationRegex={durationRegex}
/>
</Stack>
</InlineField>
</InlineFieldRow>
<InlineFieldRow className={styles.tagsRow}>
<InlineField
label={t('explore.span-filters.label-tags', 'Tags')}
labelWidth={16}
tooltip={t(
'explore.span-filters.tooltip-tags',
'Filter by tags, process tags or log fields in your spans.'
)}
>
<SpanFiltersTags
search={search}
setSearch={setSpanFiltersSearch}
trace={trace}
tagKeys={tagKeys}
setTagKeys={setTagKeys}
tagValues={tagValues}
setTagValues={setTagValues}
/>
</InlineField>
</InlineFieldRow>
<TracePageSearchBar
trace={trace}
search={search}
spanFilterMatches={spanFilterMatches}
setShowSpanFilterMatchesOnly={setShowSpanFilterMatchesOnly}
setFocusedSpanIdForSearch={setFocusedSpanIdForSearch}
focusedSpanIndexForSearch={focusedSpanIndexForSearch}
setFocusedSpanIndexForSearch={setFocusedSpanIndexForSearch}
datasourceType={datasourceType}
showSpanFilters={showSpanFilters}
/>
</Collapse>
</div>
);
});
SpanFilters.displayName = 'SpanFilters';
const getStyles = (theme: GrafanaTheme2) => ({
container: css({
label: 'SpanFilters',
margin: `0.5em 0 -${theme.spacing(1)} 0`,
zIndex: 5,
'& > div': {
borderLeft: 'none',
borderRight: 'none',
},
}),
collapseLabel: css({
svg: {
color: '#aaa',
margin: '-2px 0 0 10px',
},
}),
flexContainer: css({
display: 'flex',
justifyContent: 'space-between',
}),
intervalInput: css({
margin: '0 -4px 0 0',
}),
tagsRow: css({
margin: '-4px 0 0 0',
}),
nextPrevResult: css({
flex: 1,
alignItems: 'center',
display: 'flex',
justifyContent: 'flex-end',
marginRight: theme.spacing(1),
}),
});

View File

@@ -0,0 +1,202 @@
import { css } from '@emotion/css';
import React from 'react';
import { useMount } from 'react-use';
import { GrafanaTheme2, SelectableValue, toOption, TraceSearchProps, TraceSearchTag } from '@grafana/data';
import { t } from '@grafana/i18n';
import { AccessoryButton } from '@grafana/plugin-ui';
import { Input, Select, Stack, useStyles2 } from '@grafana/ui';
import { randomId } from '../../../../state/constants';
import { getTraceTagKeys, getTraceTagValues } from '../../../utils/tags';
import { Trace } from '../../types/trace';
interface Props {
search: TraceSearchProps;
setSearch: (search: TraceSearchProps) => void;
trace: Trace;
tagKeys?: Array<SelectableValue<string>>;
setTagKeys: React.Dispatch<React.SetStateAction<Array<SelectableValue<string>> | undefined>>;
tagValues: Record<string, Array<SelectableValue<string>>>;
setTagValues: React.Dispatch<React.SetStateAction<{ [key: string]: Array<SelectableValue<string>> }>>;
}
export const SpanFiltersTags = ({ search, trace, setSearch, tagKeys, setTagKeys, tagValues, setTagValues }: Props) => {
const styles = { ...useStyles2(getStyles) };
const getTagKeys = () => {
if (!tagKeys) {
setTagKeys(getTraceTagKeys(trace).map(toOption));
}
};
const getTagValues = (key: string) => {
return getTraceTagValues(trace, key).map(toOption);
};
useMount(() => {
if (search.tags) {
search.tags.forEach((tag) => {
if (tag.key) {
setTagValues({
...tagValues,
[tag.id]: getTagValues(tag.key),
});
}
});
}
});
const onTagChange = (tag: TraceSearchTag, v: SelectableValue<string>) => {
setSearch({
...search,
tags: search.tags?.map((x) => {
return x.id === tag.id ? { ...x, key: v?.value || '', value: undefined } : x;
}),
});
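// Pre-load the value options for the newly selected key so the value Select opens already populated.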
const loadTagValues = async () => {
if (v?.value) {
setTagValues({
...tagValues,
[tag.id]: getTagValues(v.value),
});
} else {
// removed value
const updatedValues = { ...tagValues };
if (updatedValues[tag.id]) {
delete updatedValues[tag.id];
}
setTagValues(updatedValues);
}
};
loadTagValues();
};
const addTag = () => {
const tag = {
id: randomId(),
operator: '=',
};
setSearch({ ...search, tags: [...search.tags, tag] });
};
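// Removing the last remaining tag resets the list to a single empty row so one tag filter is always shown.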
const removeTag = (id: string) => {
let tags = search.tags.filter((tag) => {
return tag.id !== id;
});
if (tags.length === 0) {
tags = [
{
id: randomId(),
operator: '=',
},
];
}
setSearch({ ...search, tags: tags });
};
return (
<div>
{search.tags?.map((tag, i) => (
<div key={tag.id}>
<Stack gap={0} width={'auto'} justifyContent={'flex-start'} alignItems={'center'}>
<div>
<Select
aria-label={t('explore.span-filters-tags.aria-label-select-tag-key', 'Select tag key')}
isClearable
key={tag.key}
onChange={(v) => onTagChange(tag, v)}
onOpenMenu={getTagKeys}
options={tagKeys || (tag.key ? [tag.key].map(toOption) : [])}
placeholder={t('explore.span-filters-tags.placeholder-select-tag', 'Select tag')}
value={tag.key || null}
/>
</div>
<div>
<Select
aria-label={t('explore.span-filters-tags.aria-label-select-tag-operator', 'Select tag operator')}
onChange={(v) => {
setSearch({
...search,
tags: search.tags?.map((x) => {
return x.id === tag.id ? { ...x, operator: v.value! } : x;
}),
});
}}
options={[toOption('='), toOption('!='), toOption('=~'), toOption('!~')]}
value={tag.operator}
/>
</div>
<span className={styles.tagValues}>
{(tag.operator === '=' || tag.operator === '!=') && (
<Select
aria-label={t('explore.span-filters-tags.aria-label-select-tag-value', 'Select tag value')}
isClearable
key={tag.value}
onChange={(v) => {
setSearch({
...search,
tags: search.tags?.map((x) => {
return x.id === tag.id ? { ...x, value: v?.value || '' } : x;
}),
});
}}
options={tagValues[tag.id] ? tagValues[tag.id] : tag.value ? [tag.value].map(toOption) : []}
placeholder={t('explore.span-filters-tags.placeholder-select-value', 'Select value')}
value={tag.value}
/>
)}
{(tag.operator === '=~' || tag.operator === '!~') && (
<Input
aria-label={t('explore.span-filters-tags.aria-label-input-tag-value', 'Input tag value')}
onChange={(v) => {
setSearch({
...search,
tags: search.tags?.map((x) => {
return x.id === tag.id ? { ...x, value: v?.currentTarget?.value || '' } : x;
}),
});
}}
placeholder={t('explore.span-filters-tags.placeholder-tag-value', 'Tag value')}
width={18}
value={tag.value || ''}
/>
)}
</span>
{(tag.key || tag.value || search.tags.length > 1) && (
<AccessoryButton
aria-label={t('explore.span-filters-tags.aria-label-remove-tag', 'Remove tag')}
variant="secondary"
icon="times"
onClick={() => removeTag(tag.id)}
tooltip={t('explore.span-filters-tags.tooltip-remove-tag', 'Remove tag')}
/>
)}
{(tag.key || tag.value) && i === search.tags.length - 1 && (
<span className={styles.addTag}>
<AccessoryButton
aria-label={t('explore.span-filters-tags.aria-label-add-tag', 'Add tag')}
variant="secondary"
icon="plus"
onClick={addTag}
tooltip={t('explore.span-filters-tags.tooltip-add-tag', 'Add tag')}
/>
</span>
)}
</Stack>
</div>
))}
</div>
);
};
const getStyles = (theme: GrafanaTheme2) => ({
addTag: css({
marginLeft: theme.spacing(1),
}),
tagValues: css({
maxWidth: '200px',
}),
});

View File

@@ -1,3 +1,18 @@
// Copyright (c) 2025 Grafana Labs
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
import { useMemo, useState } from 'react';
import { TraceSearchProps } from '@grafana/data';

View File

@@ -5,5 +5,3 @@ export const LIBRARY_NAME = 'library.name';
export const LIBRARY_VERSION = 'library.version';
export const TRACE_STATE = 'trace.state';
export const ID = 'id';
export const SPAN_NAME = 'span.name';
export const SERVICE_NAME = 'service.name';

View File

@@ -16,17 +16,7 @@ import { SpanStatusCode } from '@opentelemetry/api';
import { SelectableValue, TraceKeyValuePair, TraceSearchProps, TraceSearchTag } from '@grafana/data';
import {
KIND,
LIBRARY_NAME,
LIBRARY_VERSION,
STATUS,
STATUS_MESSAGE,
TRACE_STATE,
ID,
SPAN_NAME,
SERVICE_NAME,
} from '../constants/span';
import { KIND, LIBRARY_NAME, LIBRARY_VERSION, STATUS, STATUS_MESSAGE, TRACE_STATE, ID } from '../constants/span';
import TNil from '../types/TNil';
import { TraceSpan, CriticalPathSection } from '../types/trace';
@@ -56,13 +46,13 @@ const getAdhocFilterMatches = (spans: TraceSpan[], adhocFilters: Array<Selectabl
return matchTextSearch(value, span);
}
// Special handling for service.name
if (key === SERVICE_NAME) {
// Special handling for serviceName
if (key === 'serviceName') {
return matchField(span.process.serviceName, operator, value);
}
// Special handling for span.name
if (key === SPAN_NAME) {
// Special handling for spanName (operationName)
if (key === 'spanName') {
return matchField(span.operationName, operator, value);
}

View File

@@ -99,7 +99,7 @@ describe('useSearch', () => {
// Check that adhoc filter was created
expect(result.current.search.adhocFilters).toHaveLength(1);
expect(result.current.search.adhocFilters?.[0]).toMatchObject({
key: 'service.name',
key: 'serviceName',
operator: '=',
value: 'my-service',
});
@@ -120,7 +120,7 @@ describe('useSearch', () => {
// Check that adhoc filter was created
expect(result.current.search.adhocFilters).toHaveLength(1);
expect(result.current.search.adhocFilters?.[0]).toMatchObject({
key: 'span.name',
key: 'spanName',
operator: '!=',
value: 'my-operation',
});
@@ -195,13 +195,13 @@ describe('useSearch', () => {
// Verify each filter
const filters = result.current.search.adhocFilters || [];
expect(filters.find((f) => f.key === 'service.name')).toMatchObject({
key: 'service.name',
expect(filters.find((f) => f.key === 'serviceName')).toMatchObject({
key: 'serviceName',
operator: '=',
value: 'my-service',
});
expect(filters.find((f) => f.key === 'span.name')).toMatchObject({
key: 'span.name',
expect(filters.find((f) => f.key === 'spanName')).toMatchObject({
key: 'spanName',
operator: '!=',
value: 'my-operation',
});
@@ -306,7 +306,7 @@ describe('useSearch', () => {
expect(result.current.search.adhocFilters).toHaveLength(5);
const filters = result.current.search.adhocFilters || [];
expect(filters.find((f) => f.key === 'service.name')?.operator).toBe('!=');
expect(filters.find((f) => f.key === 'serviceName')?.operator).toBe('!=');
expect(filters.find((f) => f.key === 'tag1')?.operator).toBe('=');
expect(filters.find((f) => f.key === 'tag2')?.operator).toBe('!=');
expect(filters.find((f) => f.key === 'tag3')?.operator).toBe('=~');

View File

@@ -7,7 +7,6 @@ import { useDispatch, useSelector } from 'app/types/store';
import { DEFAULT_SPAN_FILTERS, randomId } from '../state/constants';
import { changePanelState } from '../state/explorePane';
import { SPAN_NAME, SERVICE_NAME } from './components/constants/span';
import { TraceSpan, CriticalPathSection } from './components/types/trace';
import { filterSpans } from './components/utils/filter-spans';
@@ -26,7 +25,7 @@ export function migrateToAdhocFilters(search: TraceSearchProps): TraceSearchProp
// Migrate serviceName
if (search.serviceName && search.serviceName.trim() !== '') {
adhocFilters.push({
key: SERVICE_NAME,
key: 'serviceName',
operator: search.serviceNameOperator || '=',
value: search.serviceName,
});
@@ -35,7 +34,7 @@ export function migrateToAdhocFilters(search: TraceSearchProps): TraceSearchProp
// Migrate spanName
if (search.spanName && search.spanName.trim() !== '') {
adhocFilters.push({
key: SPAN_NAME,
key: 'spanName',
operator: search.spanNameOperator || '=',
value: search.spanName,
});

View File

@@ -9,8 +9,6 @@ import {
STATUS,
STATUS_MESSAGE,
TRACE_STATE,
SPAN_NAME,
SERVICE_NAME,
} from '../components/constants/span';
import { Trace } from '../components/types/trace';
@@ -39,11 +37,6 @@ export const getTraceTagKeys = (trace: Trace) => {
span.process.tags.forEach((tag) => {
keys.push(tag.key);
});
if (span.process.serviceName) {
keys.push(SERVICE_NAME);
}
if (span.logs !== null) {
span.logs.forEach((log) => {
log.fields.forEach((field) => {
@@ -70,9 +63,6 @@ export const getTraceTagKeys = (trace: Trace) => {
if (span.traceState) {
keys.push(TRACE_STATE);
}
if (span.operationName) {
keys.push(SPAN_NAME);
}
keys.push(ID);
});
keys = uniq(keys).sort();
@@ -103,11 +93,6 @@ export const getTraceTagValues = (trace: Trace, key: string) => {
}
switch (key) {
case SPAN_NAME:
if (span.operationName) {
values.push(span.operationName);
}
break;
case KIND:
if (span.kind) {
values.push(span.kind);

View File

@@ -5,13 +5,13 @@ import { MouseEvent, useCallback, useMemo } from 'react';
import {
CoreApp,
EventBus,
GrafanaTheme2,
LogLevel,
LogsDedupDescription,
LogsDedupStrategy,
LogsSortOrder,
store,
} from '@grafana/data';
import { GrafanaTheme2 } from '@grafana/data/';
import { t } from '@grafana/i18n';
import { config, reportInteraction } from '@grafana/runtime';
import { Dropdown, Menu, useStyles2 } from '@grafana/ui';

View File

@@ -3,7 +3,7 @@ import { http, HttpResponse } from 'msw';
import { setupServer } from 'msw/node';
import { render } from 'test/test-utils';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { selectors as e2eSelectors } from '@grafana/e2e-selectors/src';
import { backendSrv } from 'app/core/services/backend_srv';
import { contextSrv } from 'app/core/services/context_srv';

Some files were not shown because too many files have changed in this diff.