Compare commits

...

2 Commits

Author SHA1 Message Date
Roberto Jimenez Sanchez
c9c0e7ace8 Regenerate client and openapi 2025-12-13 10:32:25 +01:00
Roberto Jimenez Sanchez
919308f835 feat(provisioning): add backend support for bulk dashboard export
This commit extracts the backend changes from the provisioning export feature,
implementing support for exporting specific dashboard resources to repositories.

Changes include:
- Add Resources field to ExportJobOptions for specifying dashboards to export
- Implement resource validation in export job validator
- Add specific resource export functionality
- Update worker to handle resource-specific exports
- Add comprehensive tests for export validation and functionality
- Update repository local storage to support custom paths

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2025-12-12 19:10:19 +01:00
18 changed files with 1511 additions and 101 deletions

View File

@@ -133,6 +133,12 @@ type ExportJobOptions struct {
// FIXME: we should validate this in admission hooks
// Prefix in target file system
Path string `json:"path,omitempty"`
// Resources to export
// This option has been created because currently the frontend does not use
// standardized app platform APIs. For performance and API consistency reasons, the preferred option
// is to use the resources.
Resources []ResourceRef `json:"resources,omitempty"`
}
type MigrateJobOptions struct {
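
For context, a minimal sketch (not part of this change) of a push Job that fills the new Resources field; it mirrors the validator test fixtures later in this diff and uses only the v0alpha1 types shown here. The repository name is hypothetical.

package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
)

func main() {
	// A push (export) job that targets two specific dashboards instead of a whole folder.
	job := provisioning.Job{
		ObjectMeta: metav1.ObjectMeta{Name: "export-selected-dashboards"},
		Spec: provisioning.JobSpec{
			Action:     provisioning.JobActionPush,
			Repository: "my-repo", // hypothetical repository name
			Push: &provisioning.ExportJobOptions{
				Path:    "dashboards/",
				Message: "Export selected dashboards",
				Resources: []provisioning.ResourceRef{
					{Name: "dashboard-1", Kind: "Dashboard", Group: "dashboard.grafana.app"},
					{Name: "dashboard-2", Kind: "Dashboard", Group: "dashboard.grafana.app"},
				},
			},
		},
	}
	fmt.Printf("%+v\n", job.Spec)
}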

View File

@@ -88,6 +88,11 @@ func (in *ErrorDetails) DeepCopy() *ErrorDetails {
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ExportJobOptions) DeepCopyInto(out *ExportJobOptions) {
*out = *in
if in.Resources != nil {
in, out := &in.Resources, &out.Resources
*out = make([]ResourceRef, len(*in))
copy(*out, *in)
}
return
}
@@ -430,7 +435,7 @@ func (in *JobSpec) DeepCopyInto(out *JobSpec) {
if in.Push != nil {
in, out := &in.Push, &out.Push
*out = new(ExportJobOptions)
**out = **in
(*in).DeepCopyInto(*out)
}
if in.Pull != nil {
in, out := &in.Pull, &out.Pull

View File

@@ -258,9 +258,25 @@ func schema_pkg_apis_provisioning_v0alpha1_ExportJobOptions(ref common.Reference
Format: "",
},
},
"resources": {
SchemaProps: spec.SchemaProps{
Description: "Resources to export This option has been created because currently the frontend does not use standarized app platform APIs. For performance and API consistency reasons, the preferred option is it to use the resources.",
Type: []string{"array"},
Items: &spec.SchemaOrArray{
Schema: &spec.Schema{
SchemaProps: spec.SchemaProps{
Default: map[string]interface{}{},
Ref: ref("github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1.ResourceRef"),
},
},
},
},
},
},
},
},
Dependencies: []string{
"github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1.ResourceRef"},
}
}

View File

@@ -1,5 +1,6 @@
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,DeleteJobOptions,Paths
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,DeleteJobOptions,Resources
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,ExportJobOptions,Resources
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,FileList,Items
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,HistoryList,Items
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,JobResourceSummary,Errors

View File

@@ -7,10 +7,11 @@ package v0alpha1
// ExportJobOptionsApplyConfiguration represents a declarative configuration of the ExportJobOptions type for use
// with apply.
type ExportJobOptionsApplyConfiguration struct {
Message *string `json:"message,omitempty"`
Folder *string `json:"folder,omitempty"`
Branch *string `json:"branch,omitempty"`
Path *string `json:"path,omitempty"`
Message *string `json:"message,omitempty"`
Folder *string `json:"folder,omitempty"`
Branch *string `json:"branch,omitempty"`
Path *string `json:"path,omitempty"`
Resources []ResourceRefApplyConfiguration `json:"resources,omitempty"`
}
// ExportJobOptionsApplyConfiguration constructs a declarative configuration of the ExportJobOptions type for use with
@@ -50,3 +51,16 @@ func (b *ExportJobOptionsApplyConfiguration) WithPath(value string) *ExportJobOp
b.Path = &value
return b
}
// WithResources adds the given value to the Resources field in the declarative configuration
// and returns the receiver, so that objects can be built by chaining "With" function invocations.
// If called multiple times, values provided by each call will be appended to the Resources field.
func (b *ExportJobOptionsApplyConfiguration) WithResources(values ...*ResourceRefApplyConfiguration) *ExportJobOptionsApplyConfiguration {
for i := range values {
if values[i] == nil {
panic("nil value passed to WithResources")
}
b.Resources = append(b.Resources, *values[i])
}
return b
}
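
A hedged usage sketch of the generated builder: only WithPath and WithResources are taken from this diff; the Name/Kind/Group pointer fields on ResourceRefApplyConfiguration are assumed to mirror ResourceRef and are not shown here. Written as if it lived in the same v0alpha1 applyconfiguration package (e.g. in a test).

func exampleExportApplyConfiguration() *ExportJobOptionsApplyConfiguration {
	name, kind, group := "dashboard-1", "Dashboard", "dashboard.grafana.app"
	// WithResources appends; calling it again would add more entries to Resources.
	return (&ExportJobOptionsApplyConfiguration{}).
		WithPath("dashboards/").
		WithResources(&ResourceRefApplyConfiguration{Name: &name, Kind: &kind, Group: &group})
}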

View File

@@ -7,6 +7,7 @@ import (
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository/git"
"github.com/grafana/grafana/apps/provisioning/pkg/safepath"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
)
// ValidateJob performs validation on the Job specification and returns an error if validation fails
@@ -99,6 +100,40 @@ func validateExportJobOptions(opts *provisioning.ExportJobOptions) field.ErrorLi
}
}
// Validate resources if specified
if len(opts.Resources) > 0 {
for i, r := range opts.Resources {
resourcePath := field.NewPath("spec", "push", "resources").Index(i)
// Validate required fields
if r.Name == "" {
list = append(list, field.Required(resourcePath.Child("name"), "resource name is required"))
}
if r.Kind == "" {
list = append(list, field.Required(resourcePath.Child("kind"), "resource kind is required"))
}
if r.Group == "" {
list = append(list, field.Required(resourcePath.Child("group"), "resource group is required"))
}
// Validate that folders are not allowed
if r.Kind == resources.FolderKind.Kind || r.Group == resources.FolderResource.Group {
list = append(list, field.Invalid(resourcePath, r, "folders are not supported for export"))
continue // Skip further validation for folders
}
// Validate that only supported resources are allowed
// Currently only Dashboard resources are supported (folders are rejected above)
if r.Kind != "" && r.Group != "" {
// Check if it's a Dashboard resource
isDashboard := r.Group == resources.DashboardResource.Group && r.Kind == "Dashboard"
if !isDashboard {
list = append(list, field.Invalid(resourcePath, r, "resource type is not supported for export"))
}
}
}
}
return list
}

View File

@@ -575,6 +575,242 @@ func TestValidateJob(t *testing.T) {
},
wantErr: false,
},
{
name: "push action with valid dashboard resources",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "dashboard-1",
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
{
Name: "dashboard-2",
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
Path: "dashboards/",
Message: "Export dashboards",
},
},
},
wantErr: false,
},
{
name: "push action with resource missing name",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0].name")
require.Contains(t, err.Error(), "Required value")
},
},
{
name: "push action with resource missing kind",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "dashboard-1",
Group: "dashboard.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0].kind")
require.Contains(t, err.Error(), "Required value")
},
},
{
name: "push action with resource missing group",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "dashboard-1",
Kind: "Dashboard",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0].group")
require.Contains(t, err.Error(), "Required value")
},
},
{
name: "push action with folder resource by kind",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "my-folder",
Kind: "Folder",
Group: "folder.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0]")
require.Contains(t, err.Error(), "folders are not supported for export")
},
},
{
name: "push action with folder resource by group",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "my-folder",
Kind: "SomeKind",
Group: "folder.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0]")
require.Contains(t, err.Error(), "folders are not supported for export")
},
},
{
name: "push action with unsupported resource type",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "my-resource",
Kind: "AlertRule",
Group: "alerting.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0]")
require.Contains(t, err.Error(), "resource type is not supported for export")
},
},
{
name: "push action with valid folder (old behavior)",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Folder: "my-folder",
Path: "dashboards/",
Message: "Export folder",
},
},
},
wantErr: false,
},
{
name: "push action with multiple resources including invalid ones",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "dashboard-1",
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
{
Name: "my-folder",
Kind: "Folder",
Group: "folder.grafana.app",
},
{
Name: "dashboard-2",
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[1]")
require.Contains(t, err.Error(), "folders are not supported for export")
},
},
}
for _, tt := range tests {

View File

@@ -288,18 +288,18 @@ func (r *localRepository) calculateFileHash(path string) (string, int64, error)
return hex.EncodeToString(hasher.Sum(nil)), size, nil
}
func (r *localRepository) Create(ctx context.Context, filepath string, ref string, data []byte, comment string) error {
func (r *localRepository) Create(ctx context.Context, filePath string, ref string, data []byte, comment string) error {
if err := r.validateRequest(ref); err != nil {
return err
}
fpath := safepath.Join(r.path, filepath)
fpath := safepath.Join(r.path, filePath)
_, err := os.Stat(fpath)
if !errors.Is(err, os.ErrNotExist) {
if err != nil {
return apierrors.NewInternalError(fmt.Errorf("failed to check if file exists: %w", err))
}
return apierrors.NewAlreadyExists(schema.GroupResource{}, filepath)
return apierrors.NewAlreadyExists(schema.GroupResource{}, filePath)
}
if safepath.IsDir(fpath) {
@@ -314,7 +314,7 @@ func (r *localRepository) Create(ctx context.Context, filepath string, ref strin
return nil
}
if err := os.MkdirAll(path.Dir(fpath), 0700); err != nil {
if err := os.MkdirAll(filepath.Dir(fpath), 0700); err != nil {
return apierrors.NewInternalError(fmt.Errorf("failed to create path: %w", err))
}
@@ -352,7 +352,7 @@ func (r *localRepository) Write(ctx context.Context, fpath, ref string, data []b
return os.MkdirAll(fpath, 0700)
}
if err := os.MkdirAll(path.Dir(fpath), 0700); err != nil {
if err := os.MkdirAll(filepath.Dir(fpath), 0700); err != nil {
return apierrors.NewInternalError(fmt.Errorf("failed to create path: %w", err))
}
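
Side note on the path.Dir to filepath.Dir switch above: in the old signature the parameter named filepath shadowed the path/filepath package inside Create, which is presumably why path.Dir was used there; renaming the parameter to filePath removes that shadowing. The behavioral difference only matters for OS-specific separators, as in this small illustrative sketch (paths are made up):

package main

import (
	"fmt"
	"path"
	"path/filepath"
)

func main() {
	p := `dashboards\team-a\dash.json` // backslash-separated, as on Windows
	fmt.Println(path.Dir(p))     // "." (path.Dir only understands forward slashes)
	fmt.Println(filepath.Dir(p)) // `dashboards\team-a` on Windows, "." elsewhere
}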

View File

@@ -1108,6 +1108,8 @@ export type ExportJobOptions = {
message?: string;
/** FIXME: we should validate this in admission hooks Prefix in target file system */
path?: string;
/** Resources to export This option has been created because currently the frontend does not use standardized app platform APIs. For performance and API consistency reasons, the preferred option is to use the resources. */
resources?: ResourceRef[];
};
export type JobSpec = {
/** Possible enum values:

View File

@@ -13,6 +13,7 @@ import (
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/apps/provisioning/pkg/safepath"
"github.com/grafana/grafana/pkg/apimachinery/utils"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/jobs"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
@@ -23,8 +24,58 @@ import (
// The response status indicates the original stored version, so we can then request it in an un-converted form
type conversionShim = func(ctx context.Context, item *unstructured.Unstructured) (*unstructured.Unstructured, error)
// createDashboardConversionShim creates a conversion shim for dashboards that preserves the original API version.
// It uses a provided versionClients cache to allow sharing across multiple shim calls.
func createDashboardConversionShim(ctx context.Context, clients resources.ResourceClients, gvr schema.GroupVersionResource, versionClients map[string]dynamic.ResourceInterface) conversionShim {
shim := func(ctx context.Context, item *unstructured.Unstructured) (*unstructured.Unstructured, error) {
// Check if there's a stored version in the conversion status.
// This indicates the original API version the dashboard was created with,
// which should be preserved during export regardless of whether conversion succeeded or failed.
storedVersion, _, _ := unstructured.NestedString(item.Object, "status", "conversion", "storedVersion")
if storedVersion != "" {
// For v0 we can simply fallback -- the full model is saved
if strings.HasPrefix(storedVersion, "v0") {
item.SetAPIVersion(fmt.Sprintf("%s/%s", gvr.Group, storedVersion))
return item, nil
}
// For any other version (v1, v2, v3, etc.), fetch the original version via client
// Check if we already have a client cached for this version
versionClient, ok := versionClients[storedVersion]
if !ok {
// Dynamically construct the GroupVersionResource for any version
versionGVR := schema.GroupVersionResource{
Group: gvr.Group,
Version: storedVersion,
Resource: gvr.Resource,
}
var err error
versionClient, _, err = clients.ForResource(ctx, versionGVR)
if err != nil {
return nil, fmt.Errorf("get client for version %s: %w", storedVersion, err)
}
versionClients[storedVersion] = versionClient
}
return versionClient.Get(ctx, item.GetName(), metav1.GetOptions{})
}
// If conversion failed but there's no storedVersion, this is an error condition
failed, _, _ := unstructured.NestedBool(item.Object, "status", "conversion", "failed")
if failed {
return nil, fmt.Errorf("conversion failed but no storedVersion available")
}
return item, nil
}
return shim
}
func ExportResources(ctx context.Context, options provisioning.ExportJobOptions, clients resources.ResourceClients, repositoryResources resources.RepositoryResources, progress jobs.JobProgressRecorder) error {
progress.SetMessage(ctx, "start resource export")
// Create a shared versionClients map for dashboard conversion caching
versionClients := make(map[string]dynamic.ResourceInterface)
for _, kind := range resources.SupportedProvisioningResources {
// skip folders as we do them first... so only dashboards
if kind == resources.FolderResource {
@@ -38,50 +89,10 @@ func ExportResources(ctx context.Context, options provisioning.ExportJobOptions,
}
// When requesting dashboards over the v1 api, we want to keep the original apiVersion if conversion fails
// Always use the cached versionClients map to share clients across all dashboard exports
var shim conversionShim
if kind.GroupResource() == resources.DashboardResource.GroupResource() {
// Cache clients for different versions
versionClients := make(map[string]dynamic.ResourceInterface)
shim = func(ctx context.Context, item *unstructured.Unstructured) (*unstructured.Unstructured, error) {
// Check if there's a stored version in the conversion status.
// This indicates the original API version the dashboard was created with,
// which should be preserved during export regardless of whether conversion succeeded or failed.
storedVersion, _, _ := unstructured.NestedString(item.Object, "status", "conversion", "storedVersion")
if storedVersion != "" {
// For v0 we can simply fallback -- the full model is saved
if strings.HasPrefix(storedVersion, "v0") {
item.SetAPIVersion(fmt.Sprintf("%s/%s", kind.Group, storedVersion))
return item, nil
}
// For any other version (v1, v2, v3, etc.), fetch the original version via client
// Check if we already have a client cached for this version
versionClient, ok := versionClients[storedVersion]
if !ok {
// Dynamically construct the GroupVersionResource for any version
versionGVR := schema.GroupVersionResource{
Group: kind.Group,
Version: storedVersion,
Resource: kind.Resource,
}
var err error
versionClient, _, err = clients.ForResource(ctx, versionGVR)
if err != nil {
return nil, fmt.Errorf("get client for version %s: %w", storedVersion, err)
}
versionClients[storedVersion] = versionClient
}
return versionClient.Get(ctx, item.GetName(), metav1.GetOptions{})
}
// If conversion failed but there's no storedVersion, this is an error condition
failed, _, _ := unstructured.NestedBool(item.Object, "status", "conversion", "failed")
if failed {
return nil, fmt.Errorf("conversion failed but no storedVersion available")
}
return item, nil
}
shim = createDashboardConversionShim(ctx, clients, kind, versionClients)
}
if err := exportResource(ctx, kind.Resource, options, client, shim, repositoryResources, progress); err != nil {
@@ -92,6 +103,320 @@ func ExportResources(ctx context.Context, options provisioning.ExportJobOptions,
return nil
}
// ExportSpecificResources exports a list of specific resources identified by ResourceRef entries.
// It validates that resources are not folders, are supported, and are unmanaged.
// Note: The caller must validate that the repository has a folder sync target before calling this function.
func ExportSpecificResources(ctx context.Context, repoName string, options provisioning.ExportJobOptions, clients resources.ResourceClients, repositoryResources resources.RepositoryResources, progress jobs.JobProgressRecorder) error {
if len(options.Resources) == 0 {
return errors.New("no resources specified for export")
}
progress.SetMessage(ctx, "exporting specific resources")
tree, err := loadUnmanagedFolderTree(ctx, clients, progress)
if err != nil {
return err
}
// Create a shared dashboard conversion shim and cache for all dashboard resources
// Create the versionClients map once so it's shared across all dashboard conversion calls
var dashboardShim conversionShim
versionClients := make(map[string]dynamic.ResourceInterface)
for _, resourceRef := range options.Resources {
if err := exportSingleResource(ctx, resourceRef, options, clients, repositoryResources, tree, &dashboardShim, versionClients, progress); err != nil {
return err
}
}
return nil
}
// loadUnmanagedFolderTree loads all unmanaged folders into a tree structure.
// This is needed to resolve folder paths for resources when exporting.
func loadUnmanagedFolderTree(ctx context.Context, clients resources.ResourceClients, progress jobs.JobProgressRecorder) (resources.FolderTree, error) {
progress.SetMessage(ctx, "loading folder tree from API server")
folderClient, err := clients.Folder(ctx)
if err != nil {
return nil, fmt.Errorf("get folder client: %w", err)
}
tree := resources.NewEmptyFolderTree()
if err := resources.ForEach(ctx, folderClient, func(item *unstructured.Unstructured) error {
if tree.Count() >= resources.MaxNumberOfFolders {
return errors.New("too many folders")
}
meta, err := utils.MetaAccessor(item)
if err != nil {
return fmt.Errorf("extract meta accessor: %w", err)
}
manager, _ := meta.GetManagerProperties()
// Skip if already managed by any manager (repository, file provisioning, etc.)
if manager.Identity != "" {
return nil
}
return tree.AddUnstructured(item)
}); err != nil {
return nil, fmt.Errorf("load folder tree: %w", err)
}
return tree, nil
}
// exportSingleResource exports a single resource, handling validation, fetching, conversion, and writing.
func exportSingleResource(
ctx context.Context,
resourceRef provisioning.ResourceRef,
options provisioning.ExportJobOptions,
clients resources.ResourceClients,
repositoryResources resources.RepositoryResources,
tree resources.FolderTree,
dashboardShim *conversionShim,
versionClients map[string]dynamic.ResourceInterface,
progress jobs.JobProgressRecorder,
) error {
result := jobs.JobResourceResult{
Name: resourceRef.Name,
Group: resourceRef.Group,
Kind: resourceRef.Kind,
Action: repository.FileActionCreated,
}
gvk := schema.GroupVersionKind{
Group: resourceRef.Group,
Kind: resourceRef.Kind,
// Version is left empty so ForKind will use the preferred version
}
// Validate resource reference
if err := validateResourceRef(gvk, &result, progress, ctx); err != nil {
return err
}
if result.Error != nil {
// Validation failed, but we continue processing other resources
return nil
}
// Get client and fetch resource
progress.SetMessage(ctx, fmt.Sprintf("Fetching resource %s/%s/%s", resourceRef.Group, resourceRef.Kind, resourceRef.Name))
client, gvr, err := clients.ForKind(ctx, gvk)
if err != nil {
result.Error = fmt.Errorf("get client for %s/%s/%s: %w", resourceRef.Group, resourceRef.Kind, resourceRef.Name, err)
progress.Record(ctx, result)
return progress.TooManyErrors()
}
// Validate resource type is supported
if err := validateResourceType(gvr, &result, progress, ctx); err != nil {
return err
}
if result.Error != nil {
return nil
}
// Fetch and validate the resource
item, meta, err := fetchAndValidateResource(ctx, client, resourceRef, gvr, &result, progress)
if err != nil {
return err
}
if result.Error != nil {
return nil
}
// Convert dashboard if needed
item, meta, err = convertDashboardIfNeeded(ctx, gvr, item, meta, clients, dashboardShim, versionClients, resourceRef, &result, progress)
if err != nil {
return err
}
if result.Error != nil {
return nil
}
// Compute export path from folder tree
exportPath := computeExportPath(options.Path, meta, tree)
// Export the resource
return writeResourceToRepository(ctx, item, meta, exportPath, options.Branch, repositoryResources, resourceRef, &result, progress)
}
// validateResourceRef validates that a resource reference is not a folder.
func validateResourceRef(gvk schema.GroupVersionKind, result *jobs.JobResourceResult, progress jobs.JobProgressRecorder, ctx context.Context) error {
if gvk.Kind == resources.FolderKind.Kind || gvk.Group == resources.FolderResource.Group {
result.Action = repository.FileActionIgnored
result.Error = fmt.Errorf("folders are not supported for export")
progress.Record(ctx, *result)
return progress.TooManyErrors()
}
return nil
}
// validateResourceType validates that a resource type is supported for export.
func validateResourceType(gvr schema.GroupVersionResource, result *jobs.JobResourceResult, progress jobs.JobProgressRecorder, ctx context.Context) error {
isSupported := false
for _, supported := range resources.SupportedProvisioningResources {
if supported.Group == gvr.Group && supported.Resource == gvr.Resource {
isSupported = true
break
}
}
if !isSupported {
result.Action = repository.FileActionIgnored
result.Error = fmt.Errorf("resource type %s/%s is not supported for export", gvr.Group, gvr.Resource)
progress.Record(ctx, *result)
return progress.TooManyErrors()
}
return nil
}
// fetchAndValidateResource fetches a resource from the API server and validates it's unmanaged.
func fetchAndValidateResource(
ctx context.Context,
client dynamic.ResourceInterface,
resourceRef provisioning.ResourceRef,
gvr schema.GroupVersionResource,
result *jobs.JobResourceResult,
progress jobs.JobProgressRecorder,
) (*unstructured.Unstructured, utils.GrafanaMetaAccessor, error) {
item, err := client.Get(ctx, resourceRef.Name, metav1.GetOptions{})
if err != nil {
result.Error = fmt.Errorf("get resource %s/%s/%s: %w", resourceRef.Group, resourceRef.Kind, resourceRef.Name, err)
progress.Record(ctx, *result)
return nil, nil, progress.TooManyErrors()
}
meta, err := utils.MetaAccessor(item)
if err != nil {
result.Action = repository.FileActionIgnored
result.Error = fmt.Errorf("extracting meta accessor for resource %s: %w", result.Name, err)
progress.Record(ctx, *result)
return nil, nil, progress.TooManyErrors()
}
manager, _ := meta.GetManagerProperties()
// Reject if already managed by any manager (repository, file provisioning, etc.)
if manager.Identity != "" {
result.Action = repository.FileActionIgnored
result.Error = fmt.Errorf("resource %s/%s/%s is managed and cannot be exported", resourceRef.Group, resourceRef.Kind, resourceRef.Name)
progress.Record(ctx, *result)
return nil, nil, progress.TooManyErrors()
}
return item, meta, nil
}
// convertDashboardIfNeeded converts a dashboard to its original API version if needed.
// Returns the potentially updated item and meta accessor.
func convertDashboardIfNeeded(
ctx context.Context,
gvr schema.GroupVersionResource,
item *unstructured.Unstructured,
meta utils.GrafanaMetaAccessor,
clients resources.ResourceClients,
dashboardShim *conversionShim,
versionClients map[string]dynamic.ResourceInterface,
resourceRef provisioning.ResourceRef,
result *jobs.JobResourceResult,
progress jobs.JobProgressRecorder,
) (*unstructured.Unstructured, utils.GrafanaMetaAccessor, error) {
if gvr.GroupResource() != resources.DashboardResource.GroupResource() {
return item, meta, nil
}
// Create or reuse the dashboard shim (shared across all dashboard resources)
// Pass the shared versionClients map to ensure client caching works correctly
if *dashboardShim == nil {
*dashboardShim = createDashboardConversionShim(ctx, clients, gvr, versionClients)
}
var err error
item, err = (*dashboardShim)(ctx, item)
if err != nil {
result.Error = fmt.Errorf("converting dashboard %s/%s/%s: %w", resourceRef.Group, resourceRef.Kind, resourceRef.Name, err)
progress.Record(ctx, *result)
return nil, nil, progress.TooManyErrors()
}
// Re-extract meta after shim conversion in case the item changed
meta, err = utils.MetaAccessor(item)
if err != nil {
result.Action = repository.FileActionIgnored
result.Error = fmt.Errorf("extracting meta accessor after conversion for resource %s: %w", result.Name, err)
progress.Record(ctx, *result)
return nil, nil, progress.TooManyErrors()
}
return item, meta, nil
}
// computeExportPath computes the export path by combining the base path with the folder path from the tree.
func computeExportPath(basePath string, meta utils.GrafanaMetaAccessor, tree resources.FolderTree) string {
exportPath := basePath
resourceFolder := meta.GetFolder()
if resourceFolder != "" {
// Get the folder path from the unmanaged tree (rootFolder is empty string for unmanaged tree)
fid, ok := tree.DirPath(resourceFolder, "")
if !ok {
// Folder not found in tree - this shouldn't happen for unmanaged folders
// but if it does, we'll just use the base path
return exportPath
}
if fid.Path != "" {
if exportPath != "" {
exportPath = safepath.Join(exportPath, fid.Path)
} else {
exportPath = fid.Path
}
}
}
return exportPath
}
// writeResourceToRepository writes a resource to the repository.
func writeResourceToRepository(
ctx context.Context,
item *unstructured.Unstructured,
meta utils.GrafanaMetaAccessor,
exportPath string,
branch string,
repositoryResources resources.RepositoryResources,
resourceRef provisioning.ResourceRef,
result *jobs.JobResourceResult,
progress jobs.JobProgressRecorder,
) error {
// Export the resource
progress.SetMessage(ctx, fmt.Sprintf("Exporting resource %s/%s/%s", resourceRef.Group, resourceRef.Kind, resourceRef.Name))
var err error
// exportPath already includes the folder structure from the unmanaged tree.
// We need to clear the folder metadata so WriteResourceFileFromObject doesn't try to resolve
// folder paths from repository tree (which doesn't have unmanaged folders).
// When folder is empty, WriteResourceFileFromObject will use rootFolder logic:
// - For instance targets: rootFolder is empty, so fid.Path will be empty, and it will use exportPath directly
// - For folder targets: rootFolder is repo name, but fid.Path will still be empty, so it will use exportPath directly
originalFolder := meta.GetFolder()
if originalFolder != "" {
meta.SetFolder("")
defer func() {
meta.SetFolder(originalFolder)
}()
}
result.Path, err = repositoryResources.WriteResourceFileFromObject(ctx, item, resources.WriteOptions{
Path: exportPath, // Path already includes folder structure from unmanaged tree
Ref: branch,
})
if errors.Is(err, resources.ErrAlreadyInRepository) {
result.Action = repository.FileActionIgnored
} else if err != nil {
result.Action = repository.FileActionIgnored
result.Error = fmt.Errorf("writing resource file for %s: %w", result.Name, err)
}
progress.Record(ctx, *result)
return progress.TooManyErrors()
}
func exportResource(ctx context.Context,
resource string,
options provisioning.ExportJobOptions,

View File

@@ -0,0 +1,340 @@
package export
import (
"context"
"fmt"
"testing"
"github.com/grafana/grafana/pkg/apimachinery/utils"
mock "github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/runtime/schema"
provisioningV0 "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/jobs"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
)
// createFolder creates a folder with the given Grafana UID as metadata.name and optional title
func createFolder(grafanaUID, k8sUID, title, parentUID string) unstructured.Unstructured {
folder := unstructured.Unstructured{
Object: map[string]interface{}{
"apiVersion": resources.FolderResource.GroupVersion().String(),
"kind": "Folder",
"metadata": map[string]interface{}{
"name": grafanaUID, // Grafana UID is stored as metadata.name
"uid": k8sUID,
},
"spec": map[string]interface{}{
"title": title,
},
},
}
if parentUID != "" {
meta, _ := utils.MetaAccessor(&folder)
meta.SetFolder(parentUID)
}
return folder
}
// createDashboardWithFolder creates a dashboard in the specified folder
func createDashboardWithFolder(name, folderUID string) unstructured.Unstructured {
dashboard := createDashboardObject(name)
if folderUID != "" {
meta, _ := utils.MetaAccessor(&dashboard)
meta.SetFolder(folderUID)
}
return dashboard
}
func TestExportSpecificResources(t *testing.T) {
tests := []struct {
name string
setupMocks func(t *testing.T) (resourceClients *resources.MockResourceClients, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder)
options provisioningV0.ExportJobOptions
wantErr string
assertResults func(t *testing.T, resourceClients *resources.MockResourceClients, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder)
}{
{
name: "success with folder paths",
setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
folder := createFolder("team-a-uid", "k8s-1", "team-a", "")
dashboard1 := createDashboardWithFolder("dashboard-1", "team-a-uid")
dashboard2 := createDashboardObject("dashboard-2")
resourceClients := resources.NewMockResourceClients(t)
folderClient := &mockDynamicInterface{items: []unstructured.Unstructured{folder}}
resourceClients.On("Folder", mock.Anything).Return(folderClient, nil)
gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{dashboard1}}, resources.DashboardResource, nil).Once()
resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{dashboard2}}, resources.DashboardResource, nil).Once()
repoResources := resources.NewMockRepositoryResources(t)
repoResources.On("WriteResourceFileFromObject", mock.Anything,
mock.MatchedBy(func(obj *unstructured.Unstructured) bool { return obj.GetName() == "dashboard-1" }),
mock.MatchedBy(func(opts resources.WriteOptions) bool { return opts.Path == "grafana/team-a" })).
Return("grafana/team-a/dashboard-1.json", nil)
repoResources.On("WriteResourceFileFromObject", mock.Anything,
mock.MatchedBy(func(obj *unstructured.Unstructured) bool { return obj.GetName() == "dashboard-2" }),
mock.MatchedBy(func(opts resources.WriteOptions) bool { return opts.Path == "grafana" })).
Return("grafana/dashboard-2.json", nil)
progress := jobs.NewMockJobProgressRecorder(t)
progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Name == "dashboard-1" && r.Action == repository.FileActionCreated
})).Return()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Name == "dashboard-2" && r.Action == repository.FileActionCreated
})).Return()
progress.On("TooManyErrors").Return(nil).Times(2)
return resourceClients, repoResources, progress
},
options: provisioningV0.ExportJobOptions{
Path: "grafana",
Branch: "feature/branch",
Resources: []provisioningV0.ResourceRef{
{Name: "dashboard-1", Kind: "Dashboard", Group: resources.DashboardResource.Group},
{Name: "dashboard-2", Kind: "Dashboard", Group: resources.DashboardResource.Group},
},
},
},
{
name: "empty resources returns error",
setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
return nil, nil, nil
},
options: provisioningV0.ExportJobOptions{
Resources: []provisioningV0.ResourceRef{},
},
wantErr: "no resources specified for export",
},
{
name: "rejects folders",
setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
resourceClients := resources.NewMockResourceClients(t)
resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)
progress := jobs.NewMockJobProgressRecorder(t)
progress.On("SetMessage", mock.Anything, mock.Anything).Return()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Name == "my-folder" && r.Error != nil && r.Error.Error() == "folders are not supported for export"
})).Return()
progress.On("TooManyErrors").Return(nil)
return resourceClients, nil, progress
},
options: provisioningV0.ExportJobOptions{
Resources: []provisioningV0.ResourceRef{{Name: "my-folder", Kind: "Folder", Group: resources.FolderResource.Group}},
},
},
{
name: "rejects managed resources",
setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
dashboard := createDashboardObject("managed-dashboard")
meta, _ := utils.MetaAccessor(&dashboard)
meta.SetManagerProperties(utils.ManagerProperties{Kind: utils.ManagerKindRepo, Identity: "some-repo"})
resourceClients := resources.NewMockResourceClients(t)
resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)
gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{dashboard}}, resources.DashboardResource, nil)
progress := jobs.NewMockJobProgressRecorder(t)
progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Name == "managed-dashboard" && r.Error != nil && r.Error.Error() == "resource dashboard.grafana.app/Dashboard/managed-dashboard is managed and cannot be exported"
})).Return()
progress.On("TooManyErrors").Return(nil)
return resourceClients, nil, progress
},
options: provisioningV0.ExportJobOptions{
Resources: []provisioningV0.ResourceRef{{Name: "managed-dashboard", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
},
},
{
name: "rejects unsupported resources",
setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
resourceClients := resources.NewMockResourceClients(t)
resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)
gvk := schema.GroupVersionKind{Group: "playlist.grafana.app", Kind: "Playlist"}
gvr := schema.GroupVersionResource{Group: "playlist.grafana.app", Resource: "playlists"}
resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{}, gvr, nil)
progress := jobs.NewMockJobProgressRecorder(t)
progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Name == "some-resource" && r.Error != nil && r.Error.Error() == "resource type playlist.grafana.app/playlists is not supported for export"
})).Return()
progress.On("TooManyErrors").Return(nil)
return resourceClients, nil, progress
},
options: provisioningV0.ExportJobOptions{
Resources: []provisioningV0.ResourceRef{{Name: "some-resource", Kind: "Playlist", Group: "playlist.grafana.app"}},
},
},
{
name: "resolves nested folder paths",
setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
parentFolder := createFolder("team-a-uid", "k8s-1", "team-a", "")
childFolder := createFolder("subteam-uid", "k8s-2", "subteam", "team-a-uid")
dashboard := createDashboardWithFolder("dashboard-in-nested-folder", "subteam-uid")
resourceClients := resources.NewMockResourceClients(t)
resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{items: []unstructured.Unstructured{parentFolder, childFolder}}, nil)
gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{dashboard}}, resources.DashboardResource, nil)
repoResources := resources.NewMockRepositoryResources(t)
repoResources.On("WriteResourceFileFromObject", mock.Anything,
mock.MatchedBy(func(obj *unstructured.Unstructured) bool { return obj.GetName() == "dashboard-in-nested-folder" }),
mock.MatchedBy(func(opts resources.WriteOptions) bool { return opts.Path == "grafana/team-a/subteam" })).
Return("grafana/team-a/subteam/dashboard-in-nested-folder.json", nil)
progress := jobs.NewMockJobProgressRecorder(t)
progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Name == "dashboard-in-nested-folder" && r.Action == repository.FileActionCreated
})).Return()
progress.On("TooManyErrors").Return(nil)
return resourceClients, repoResources, progress
},
options: provisioningV0.ExportJobOptions{
Path: "grafana",
Branch: "feature/branch",
Resources: []provisioningV0.ResourceRef{{Name: "dashboard-in-nested-folder", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
},
},
{
name: "folder client error",
setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
resourceClients := resources.NewMockResourceClients(t)
resourceClients.On("Folder", mock.Anything).Return(nil, fmt.Errorf("folder client error"))
progress := jobs.NewMockJobProgressRecorder(t)
progress.On("SetMessage", mock.Anything, mock.Anything).Return()
return resourceClients, nil, progress
},
options: provisioningV0.ExportJobOptions{
Resources: []provisioningV0.ResourceRef{{Name: "dashboard-1", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
},
wantErr: "get folder client: folder client error",
},
{
name: "resource not found",
setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
resourceClients := resources.NewMockResourceClients(t)
resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)
gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{}, resources.DashboardResource, nil)
progress := jobs.NewMockJobProgressRecorder(t)
progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Name == "non-existent-dashboard" && r.Error != nil && r.Error.Error() == "get resource dashboard.grafana.app/Dashboard/non-existent-dashboard: no items found"
})).Return()
progress.On("TooManyErrors").Return(nil)
return resourceClients, nil, progress
},
options: provisioningV0.ExportJobOptions{
Resources: []provisioningV0.ResourceRef{{Name: "non-existent-dashboard", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
},
},
{
name: "dashboard version conversion",
setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
v1Dashboard := unstructured.Unstructured{
Object: map[string]interface{}{
"apiVersion": resources.DashboardResource.GroupVersion().String(),
"kind": "Dashboard",
"metadata": map[string]interface{}{"name": "v2-dashboard"},
"status": map[string]interface{}{
"conversion": map[string]interface{}{"failed": true, "storedVersion": "v2alpha1"},
},
},
}
v2Dashboard := createV2DashboardObject("v2-dashboard", "v2alpha1")
resourceClients := resources.NewMockResourceClients(t)
resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)
gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{v1Dashboard}}, resources.DashboardResource, nil)
v2GVR := schema.GroupVersionResource{Group: resources.DashboardResource.Group, Version: "v2alpha1", Resource: resources.DashboardResource.Resource}
resourceClients.On("ForResource", mock.Anything, v2GVR).Return(&mockDynamicInterface{items: []unstructured.Unstructured{v2Dashboard}}, gvk, nil)
repoResources := resources.NewMockRepositoryResources(t)
repoResources.On("WriteResourceFileFromObject", mock.Anything,
mock.MatchedBy(func(obj *unstructured.Unstructured) bool {
return obj.GetName() == "v2-dashboard" && obj.GetAPIVersion() == "dashboard.grafana.app/v2alpha1"
}),
mock.Anything).Return("grafana/v2-dashboard.json", nil)
progress := jobs.NewMockJobProgressRecorder(t)
progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Name == "v2-dashboard" && r.Action == repository.FileActionCreated
})).Return()
progress.On("TooManyErrors").Return(nil)
return resourceClients, repoResources, progress
},
options: provisioningV0.ExportJobOptions{
Resources: []provisioningV0.ResourceRef{{Name: "v2-dashboard", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
},
},
{
name: "too many errors",
setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
dashboard := createDashboardObject("dashboard-1")
resourceClients := resources.NewMockResourceClients(t)
resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)
gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{dashboard}}, resources.DashboardResource, nil)
repoResources := resources.NewMockRepositoryResources(t)
repoResources.On("WriteResourceFileFromObject", mock.Anything, mock.Anything, mock.Anything).Return("", fmt.Errorf("write error"))
progress := jobs.NewMockJobProgressRecorder(t)
progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Name == "dashboard-1" && r.Action == repository.FileActionIgnored && r.Error != nil
})).Return()
progress.On("TooManyErrors").Return(fmt.Errorf("too many errors"))
return resourceClients, repoResources, progress
},
options: provisioningV0.ExportJobOptions{
Resources: []provisioningV0.ResourceRef{{Name: "dashboard-1", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
},
wantErr: "too many errors",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
resourceClients, repoResources, progress := tt.setupMocks(t)
err := ExportSpecificResources(context.Background(), "test-repo", tt.options, resourceClients, repoResources, progress)
if tt.wantErr != "" {
require.EqualError(t, err, tt.wantErr)
} else {
require.NoError(t, err)
}
if tt.assertResults != nil {
tt.assertResults(t, resourceClients, repoResources, progress)
}
})
}
}

View File

@@ -21,26 +21,29 @@ type ExportFn func(ctx context.Context, repoName string, options provisioning.Ex
type WrapWithStageFn func(ctx context.Context, repo repository.Repository, stageOptions repository.StageOptions, fn func(repo repository.Repository, staged bool) error) error
type ExportWorker struct {
clientFactory resources.ClientFactory
repositoryResources resources.RepositoryResourcesFactory
exportFn ExportFn
wrapWithStageFn WrapWithStageFn
metrics jobs.JobMetrics
clientFactory resources.ClientFactory
repositoryResources resources.RepositoryResourcesFactory
exportAllFn ExportFn
exportSpecificResourcesFn ExportFn
wrapWithStageFn WrapWithStageFn
metrics jobs.JobMetrics
}
func NewExportWorker(
clientFactory resources.ClientFactory,
repositoryResources resources.RepositoryResourcesFactory,
exportFn ExportFn,
exportAllFn ExportFn,
exportSpecificResourcesFn ExportFn,
wrapWithStageFn WrapWithStageFn,
metrics jobs.JobMetrics,
) *ExportWorker {
return &ExportWorker{
clientFactory: clientFactory,
repositoryResources: repositoryResources,
exportFn: exportFn,
wrapWithStageFn: wrapWithStageFn,
metrics: metrics,
clientFactory: clientFactory,
repositoryResources: repositoryResources,
exportAllFn: exportAllFn,
exportSpecificResourcesFn: exportSpecificResourcesFn,
wrapWithStageFn: wrapWithStageFn,
metrics: metrics,
}
}
@@ -100,7 +103,19 @@ func (r *ExportWorker) Process(ctx context.Context, repo repository.Repository,
return fmt.Errorf("create repository resource client: %w", err)
}
return r.exportFn(ctx, cfg.Name, *options, clients, repositoryResources, progress)
// Check if Resources list is provided (specific resources export mode)
if len(options.Resources) > 0 {
progress.SetTotal(ctx, len(options.Resources))
progress.StrictMaxErrors(1) // Fail fast on any error during export
// Validate that specific resource export is only used with folder sync targets
if cfg.Spec.Sync.Target != provisioning.SyncTargetTypeFolder {
return fmt.Errorf("specific resource export is only supported for folder sync targets, but repository has target type '%s'", cfg.Spec.Sync.Target)
}
return r.exportSpecificResourcesFn(ctx, cfg.Name, *options, clients, repositoryResources, progress)
}
// Fall back to existing ExportAll behavior for backward compatibility
return r.exportAllFn(ctx, cfg.Name, *options, clients, repositoryResources, progress)
}
err := r.wrapWithStageFn(ctx, repo, cloneOptions, fn)

View File

@@ -56,7 +56,7 @@ func TestExportWorker_IsSupported(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
r := NewExportWorker(nil, nil, nil, nil, metrics)
r := NewExportWorker(nil, nil, nil, nil, nil, metrics)
got := r.IsSupported(context.Background(), tt.job)
require.Equal(t, tt.want, got)
})
@@ -70,7 +70,7 @@ func TestExportWorker_ProcessNoExportSettings(t *testing.T) {
},
}
r := NewExportWorker(nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(nil, nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), nil, job, nil)
require.EqualError(t, err, "missing export settings")
}
@@ -93,7 +93,7 @@ func TestExportWorker_ProcessWriteNotAllowed(t *testing.T) {
},
})
r := NewExportWorker(nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(nil, nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, nil)
require.EqualError(t, err, "this repository is read only")
}
@@ -117,7 +117,7 @@ func TestExportWorker_ProcessBranchNotAllowedForLocal(t *testing.T) {
},
})
r := NewExportWorker(nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(nil, nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, nil)
require.EqualError(t, err, "this repository does not support the branch workflow")
}
@@ -149,7 +149,7 @@ func TestExportWorker_ProcessFailedToCreateClients(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, nil, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
mockProgress := jobs.NewMockJobProgressRecorder(t)
err := r.Process(context.Background(), mockRepo, job, mockProgress)
@@ -185,7 +185,7 @@ func TestExportWorker_ProcessNotReaderWriter(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, nil, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "export job submitted targeting repository that is not a ReaderWriter")
}
@@ -221,7 +221,7 @@ func TestExportWorker_ProcessRepositoryResourcesError(t *testing.T) {
mockStageFn.On("Execute", context.Background(), mockRepo, mock.Anything, mock.Anything).Return(func(ctx context.Context, repo repository.Repository, stageOpts repository.StageOptions, fn func(repository.Repository, bool) error) error {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "create repository resource client: failed to create repository resources client")
}
@@ -273,7 +273,7 @@ func TestExportWorker_ProcessStageOptions(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.NoError(t, err)
}
@@ -355,7 +355,7 @@ func TestExportWorker_ProcessStageOptionsWithBranch(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.NoError(t, err)
})
@@ -398,7 +398,7 @@ func TestExportWorker_ProcessExportFnError(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "export failed")
}
@@ -426,7 +426,7 @@ func TestExportWorker_ProcessWrapWithStageFnError(t *testing.T) {
mockStageFn := NewMockWrapWithStageFn(t)
mockStageFn.On("Execute", mock.Anything, mockRepo, mock.Anything, mock.Anything).Return(errors.New("stage failed"))
r := NewExportWorker(nil, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(nil, nil, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "stage failed")
}
@@ -452,7 +452,7 @@ func TestExportWorker_ProcessBranchNotAllowedForStageableRepositories(t *testing
mockProgress := jobs.NewMockJobProgressRecorder(t)
// No progress messages expected in current implementation
r := NewExportWorker(nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(nil, nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "this repository does not support the branch workflow")
}
@@ -504,7 +504,7 @@ func TestExportWorker_ProcessGitRepository(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.NoError(t, err)
}
@@ -550,7 +550,7 @@ func TestExportWorker_ProcessGitRepositoryExportFnError(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "export failed")
}
@@ -613,7 +613,7 @@ func TestExportWorker_RefURLsSetWithBranch(t *testing.T) {
return fn(mockReaderWriter, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepoWithURLs, job, mockProgress)
require.NoError(t, err)
@@ -670,7 +670,7 @@ func TestExportWorker_RefURLsNotSetWithoutBranch(t *testing.T) {
return fn(mockReaderWriter, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepoWithURLs, job, mockProgress)
require.NoError(t, err)
@@ -727,7 +727,7 @@ func TestExportWorker_RefURLsNotSetForNonURLRepository(t *testing.T) {
return fn(mockReaderWriter, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.NoError(t, err)

View File

@@ -705,6 +705,7 @@ func (b *APIBuilder) GetPostStartHooks() (map[string]genericapiserver.PostStartH
b.clients,
b.repositoryResources,
export.ExportAll,
export.ExportSpecificResources,
stageIfPossible,
metrics,
)
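
The worker is now wired with both export functions. Judging from the widened constructor in the test changes above and the new Resources option, it presumably picks ExportSpecificResources when the push options name specific resources and falls back to ExportAll otherwise. A minimal, runnable sketch of that assumed dispatch, using stand-in types rather than the real export package signatures:

package main

import "fmt"

// Stand-ins for provisioning.ResourceRef / ExportJobOptions; illustration only.
type resourceRef struct{ Name, Kind, Group string }

type exportJobOptions struct {
	Path      string
	Resources []resourceRef
}

// pickExport mirrors the assumed choice between the two export functions.
func pickExport(opts exportJobOptions) string {
	if len(opts.Resources) > 0 {
		return "ExportSpecificResources" // export only the requested dashboards
	}
	return "ExportAll" // export everything reachable from the sync target
}

func main() {
	fmt.Println(pickExport(exportJobOptions{})) // ExportAll
	fmt.Println(pickExport(exportJobOptions{Resources: []resourceRef{
		{Name: "my-dash", Kind: "Dashboard", Group: "dashboard.grafana.app"},
	}})) // ExportSpecificResources
}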

View File

@@ -167,30 +167,40 @@ func (r *ResourcesManager) WriteResourceFileFromObject(ctx context.Context, obj
title = name
}
folder := meta.GetFolder()
// Get the absolute path of the folder
rootFolder := RootFolder(r.repo.Config())
fileName := slugify.Slugify(title) + ".json"
// If no folder is specified in the file, set it to the root to ensure everything is written under it
var fid Folder
if folder == "" {
fid = Folder{ID: rootFolder}
meta.SetFolder(rootFolder) // Set the folder in the metadata to the root folder
} else {
var ok bool
fid, ok = r.folders.Tree().DirPath(folder, rootFolder)
if !ok {
return "", fmt.Errorf("folder %s NOT found in tree with root: %s", folder, rootFolder)
// Build the full path: start with options.Path, then add folder path, then filename
basePath := options.Path
// If options.Path is provided, use it directly (it already includes folder structure from export).
// Otherwise, resolve folder path from the repository tree.
if basePath == "" {
folder := meta.GetFolder()
// Get the absolute path of the folder
rootFolder := RootFolder(r.repo.Config())
if folder == "" {
// If no folder is specified and no path is provided, set it to the root to ensure everything is written under it
meta.SetFolder(rootFolder) // Set the folder in the metadata to the root folder
} else {
var ok bool
var fid Folder
fid, ok = r.folders.Tree().DirPath(folder, rootFolder)
if !ok {
// Fallback: try without rootFolder (for instance targets where rootFolder is empty)
fid, ok = r.folders.Tree().DirPath(folder, "")
if !ok {
return "", fmt.Errorf("folder %s NOT found in tree", folder)
}
}
if fid.Path != "" {
basePath = fid.Path
}
}
}
fileName := slugify.Slugify(title) + ".json"
if fid.Path != "" {
fileName = safepath.Join(fid.Path, fileName)
}
if options.Path != "" {
fileName = safepath.Join(options.Path, fileName)
if basePath != "" {
fileName = safepath.Join(basePath, fileName)
}
parsed := ParsedResource{

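In effect, the new logic gives options.Path absolute precedence: when it is set, the folder tree is never consulted; when it is empty, the folder's directory path becomes the prefix, with a fallback lookup against an empty root for instance targets. A runnable sketch of that precedence, using path.Join and a caller-supplied lookup as stand-ins for safepath.Join and r.folders.Tree().DirPath:

package main

import (
	"fmt"
	"path"
)

// exportFileName sketches the basePath precedence introduced above; lookup is a
// stand-in for the repository folder tree's DirPath, not the real API.
func exportFileName(slug, optionsPath, folder, rootFolder string,
	lookup func(folder, root string) (string, bool)) (string, error) {
	fileName := slug + ".json"
	basePath := optionsPath
	if basePath == "" && folder != "" {
		dir, ok := lookup(folder, rootFolder)
		if !ok {
			// Fallback for instance targets where the root folder is empty.
			if dir, ok = lookup(folder, ""); !ok {
				return "", fmt.Errorf("folder %s not found in tree", folder)
			}
		}
		basePath = dir
	}
	if basePath != "" {
		fileName = path.Join(basePath, fileName)
	}
	return fileName, nil
}

func main() {
	lookup := func(folder, root string) (string, bool) { return path.Join(root, folder), true }
	withPath, _ := exportFileName("my-dash", "custom/path", "team-a", "grafana", lookup)
	withoutPath, _ := exportFileName("my-dash", "", "team-a", "grafana", lookup)
	fmt.Println(withPath)    // custom/path/my-dash.json: options.Path wins, folder ignored
	fmt.Println(withoutPath) // grafana/team-a/my-dash.json: folder tree supplies the prefix
}
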
View File

@@ -145,6 +145,8 @@ func (t *folderTree) AddUnstructured(item *unstructured.Unstructured) error {
return fmt.Errorf("extract meta accessor: %w", err)
}
// In Grafana, folder UIDs are stored as metadata.name
// The grafana.app/folder annotation contains the folder's metadata.name (which is its Grafana UID)
folder := Folder{
Title: meta.FindTitle(item.GetName()),
ID: item.GetName(),

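The added comment spells out the linkage the tree relies on: a resource's grafana.app/folder annotation holds the parent folder's metadata.name, which doubles as the folder's Grafana UID. A small sketch assuming nothing beyond that annotation key:

package main

import (
	"fmt"

	"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
)

func main() {
	// In Grafana, a folder's UID is its metadata.name.
	folder := &unstructured.Unstructured{Object: map[string]interface{}{
		"metadata": map[string]interface{}{"name": "team-a"},
		"spec":     map[string]interface{}{"title": "Team A"},
	}}
	// A dashboard points at its parent folder via the folder's metadata.name.
	dashboard := &unstructured.Unstructured{Object: map[string]interface{}{
		"metadata": map[string]interface{}{
			"name":        "my-dash",
			"annotations": map[string]interface{}{"grafana.app/folder": folder.GetName()},
		},
	}}
	fmt.Println(dashboard.GetAnnotations()["grafana.app/folder"]) // team-a
}
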
View File

@@ -3288,6 +3288,18 @@
"path": {
"description": "FIXME: we should validate this in admission hooks Prefix in target file system",
"type": "string"
},
"resources": {
"description": "Resources to export This option has been created because currently the frontend does not use standarized app platform APIs. For performance and API consistency reasons, the preferred option is it to use the resources.",
"type": "array",
"items": {
"default": {},
"allOf": [
{
"$ref": "#/components/schemas/com.github.grafana.grafana.apps.provisioning.pkg.apis.provisioning.v0alpha1.ResourceRef"
}
]
}
}
}
},
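
This is the schema the integration tests below exercise: a push job whose export options carry a resources array. A short sketch of building and marshalling such a spec with the same Go types the tests use (the exact wire casing of the ResourceRef fields is not shown in this diff, so the printed output is illustrative):

package main

import (
	"encoding/json"
	"fmt"

	provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
)

func main() {
	spec := provisioning.JobSpec{
		Action: provisioning.JobActionPush,
		Push: &provisioning.ExportJobOptions{
			Path: "custom/path",
			Resources: []provisioning.ResourceRef{
				{Name: "my-dash", Kind: "Dashboard", Group: "dashboard.grafana.app"},
			},
		},
	}
	body, _ := json.MarshalIndent(spec, "", "  ")
	fmt.Println(string(body)) // includes a "resources" array matching the schema above
}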

View File

@@ -0,0 +1,390 @@
package provisioning

import (
"context"
"encoding/json"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/pkg/apimachinery/utils"
"github.com/grafana/grafana/pkg/util/testutil"
)

func TestIntegrationProvisioning_ExportSpecificResources(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
helper := runGrafana(t)
ctx := context.Background()
// Create unmanaged dashboards directly in Grafana
dashboard1 := helper.LoadYAMLOrJSONFile("exportunifiedtorepository/dashboard-test-v1.yaml")
dashboard1Obj, err := helper.DashboardsV1.Resource.Create(ctx, dashboard1, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create first dashboard")
dashboard1Name := dashboard1Obj.GetName()
dashboard2 := helper.LoadYAMLOrJSONFile("exportunifiedtorepository/dashboard-test-v2beta1.yaml")
dashboard2Obj, err := helper.DashboardsV2beta1.Resource.Create(ctx, dashboard2, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create second dashboard")
dashboard2Name := dashboard2Obj.GetName()
// Verify dashboards are unmanaged
dash1, err := helper.DashboardsV1.Resource.Get(ctx, dashboard1Name, metav1.GetOptions{})
require.NoError(t, err)
manager1, found1 := dash1.GetAnnotations()[utils.AnnoKeyManagerIdentity]
require.True(t, !found1 || manager1 == "", "dashboard1 should be unmanaged")
dash2, err := helper.DashboardsV2beta1.Resource.Get(ctx, dashboard2Name, metav1.GetOptions{})
require.NoError(t, err)
manager2, found2 := dash2.GetAnnotations()[utils.AnnoKeyManagerIdentity]
require.True(t, !found2 || manager2 == "", "dashboard2 should be unmanaged")
// Create repository with folder sync target (required for specific resource export)
const repo = "export-resources-test-repo"
testRepo := TestRepo{
Name: repo,
Target: "folder",
Copies: map[string]string{},
ExpectedDashboards: 0, // No dashboards expected after sync (we'll export manually)
ExpectedFolders: 0,
SkipResourceAssertions: true, // Skip assertions since we created dashboards before repo
}
helper.CreateRepo(t, testRepo)
// Export specific dashboards using Resources field
spec := provisioning.JobSpec{
Action: provisioning.JobActionPush,
Push: &provisioning.ExportJobOptions{
Path: "",
Resources: []provisioning.ResourceRef{
{
Name: dashboard1Name,
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
{
Name: dashboard2Name,
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
},
}
helper.TriggerJobAndWaitForSuccess(t, repo, spec)
// Verify both dashboards were exported
dashboard1File := filepath.Join(helper.ProvisioningPath, "test-dashboard-created-at-v1.json")
dashboard2File := filepath.Join(helper.ProvisioningPath, "test-dashboard-created-at-v2beta1.json")
// Check dashboard1
body1, err := os.ReadFile(dashboard1File) //nolint:gosec
require.NoError(t, err, "exported file should exist for dashboard1")
obj1 := map[string]any{}
err = json.Unmarshal(body1, &obj1)
require.NoError(t, err, "exported file should be valid JSON")
val, _, err := unstructured.NestedString(obj1, "metadata", "name")
require.NoError(t, err)
require.Equal(t, "test-v1", val)
// Check dashboard2
body2, err := os.ReadFile(dashboard2File) //nolint:gosec
require.NoError(t, err, "exported file should exist for dashboard2")
obj2 := map[string]any{}
err = json.Unmarshal(body2, &obj2)
require.NoError(t, err, "exported file should be valid JSON")
val, _, err = unstructured.NestedString(obj2, "metadata", "name")
require.NoError(t, err)
require.Equal(t, "test-v2beta1", val)
}

func TestIntegrationProvisioning_ExportSpecificResourcesWithPath(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
helper := runGrafana(t)
ctx := context.Background()
// Create unmanaged dashboard
dashboard := helper.LoadYAMLOrJSONFile("exportunifiedtorepository/dashboard-test-v1.yaml")
dashboardObj, err := helper.DashboardsV1.Resource.Create(ctx, dashboard, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create dashboard")
dashboardName := dashboardObj.GetName()
// Create repository with folder sync target (required for specific resource export)
const repo = "export-resources-path-test-repo"
testRepo := TestRepo{
Name: repo,
Target: "folder",
Copies: map[string]string{},
ExpectedDashboards: 0,
ExpectedFolders: 0,
SkipResourceAssertions: true, // Skip assertions since we created dashboard before repo
}
helper.CreateRepo(t, testRepo)
// Export with custom path
spec := provisioning.JobSpec{
Action: provisioning.JobActionPush,
Push: &provisioning.ExportJobOptions{
Path: "custom/path",
Resources: []provisioning.ResourceRef{
{
Name: dashboardName,
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
},
}
helper.TriggerJobAndWaitForSuccess(t, repo, spec)
// Verify dashboard was exported to custom path
expectedFile := filepath.Join(helper.ProvisioningPath, "custom", "path", "test-dashboard-created-at-v1.json")
body, err := os.ReadFile(expectedFile) //nolint:gosec
require.NoError(t, err, "exported file should exist at custom path")
obj := map[string]any{}
err = json.Unmarshal(body, &obj)
require.NoError(t, err, "exported file should be valid JSON")
val, _, err := unstructured.NestedString(obj, "metadata", "name")
require.NoError(t, err)
require.Equal(t, "test-v1", val)
}

func TestIntegrationProvisioning_ExportSpecificResourcesRejectsFolders(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
helper := runGrafana(t)
ctx := context.Background()
// Create a folder
folder := &unstructured.Unstructured{
Object: map[string]any{
"apiVersion": "folder.grafana.app/v1beta1",
"kind": "Folder",
"metadata": map[string]any{
"name": "test-folder",
},
"spec": map[string]any{
"title": "Test Folder",
},
},
}
folderObj, err := helper.Folders.Resource.Create(ctx, folder, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create folder")
folderName := folderObj.GetName()
// Create repository with folder sync target (required for specific resource export)
const repo = "export-reject-folders-test-repo"
testRepo := TestRepo{
Name: repo,
Target: "folder",
Copies: map[string]string{},
ExpectedDashboards: 0,
ExpectedFolders: 0,
SkipResourceAssertions: true, // Skip assertions since we created folder before repo
}
helper.CreateRepo(t, testRepo)
// Try to export folder (should fail validation)
spec := provisioning.JobSpec{
Action: provisioning.JobActionPush,
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: folderName,
Kind: "Folder",
Group: "folder.grafana.app",
},
},
},
}
// This should fail with validation error
body := asJSON(spec)
result := helper.AdminREST.Post().
Namespace("default").
Resource("repositories").
Name(repo).
SubResource("jobs").
Body(body).
SetHeader("Content-Type", "application/json").
Do(ctx)
err = result.Error()
require.Error(t, err, "should fail validation when trying to export folder")
require.Contains(t, err.Error(), "folders are not supported", "error should mention folders are not supported")
}

func TestIntegrationProvisioning_ExportSpecificResourcesRejectsManagedResources(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
helper := runGrafana(t)
ctx := context.Background()
// Create a managed dashboard via repository sync (use folder target to allow second repo)
testRepo := TestRepo{
Name: "managed-dashboard-repo",
Target: "folder",
Copies: map[string]string{
"exportunifiedtorepository/dashboard-test-v1.yaml": "dashboard.json",
},
ExpectedDashboards: 1,
ExpectedFolders: 1, // Folder target creates a folder with the repo name
SkipResourceAssertions: true, // Skip assertions since we're testing export, not sync
}
helper.CreateRepo(t, testRepo)
// Get the managed dashboard
dashboards, err := helper.DashboardsV1.Resource.List(ctx, metav1.ListOptions{})
require.NoError(t, err)
require.Len(t, dashboards.Items, 1, "should have one managed dashboard")
managedDashboard := dashboards.Items[0]
managedDashboardName := managedDashboard.GetName()
// Verify it's managed
manager, found := managedDashboard.GetAnnotations()[utils.AnnoKeyManagerIdentity]
require.True(t, found && manager != "", "dashboard should be managed")
// Create another repository for export (must be a folder target, since an instance-target repository can only exist alone)
const exportRepo = "export-managed-reject-test-repo"
exportTestRepo := TestRepo{
Name: exportRepo,
Target: "folder",
Copies: map[string]string{},
ExpectedDashboards: 0,
ExpectedFolders: 0,
SkipResourceAssertions: true, // Skip assertions since we're testing export, not sync
}
helper.CreateRepo(t, exportTestRepo)
// Try to export managed dashboard (should fail)
spec := provisioning.JobSpec{
Action: provisioning.JobActionPush,
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: managedDashboardName,
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
},
}
// This should fail because the resource is managed
body := asJSON(spec)
result := helper.AdminREST.Post().
Namespace("default").
Resource("repositories").
Name(exportRepo).
SubResource("jobs").
Body(body).
SetHeader("Content-Type", "application/json").
Do(ctx)
// Wait for job to complete and check it failed
obj, err := result.Get()
require.NoError(t, err, "job should be created")
unstruct, ok := obj.(*unstructured.Unstructured)
require.True(t, ok, "should get unstructured object")
// Wait for job to complete
job := helper.AwaitJob(t, ctx, unstruct)
lastState := mustNestedString(job.Object, "status", "state")
lastErrors := mustNestedStringSlice(job.Object, "status", "errors")
// Job should fail with error about managed resource
require.Equal(t, string(provisioning.JobStateError), lastState, "job should fail")
require.NotEmpty(t, lastErrors, "job should have errors")
require.Contains(t, lastErrors[0], "managed", "error should mention managed resource")
}

func TestIntegrationProvisioning_ExportSpecificResourcesWithFolderStructure(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
helper := runGrafana(t)
ctx := context.Background()
// Create an unmanaged folder
folder := &unstructured.Unstructured{
Object: map[string]any{
"apiVersion": "folder.grafana.app/v1beta1",
"kind": "Folder",
"metadata": map[string]any{
"name": "test-export-folder",
},
"spec": map[string]any{
"title": "Test Export Folder",
},
},
}
folderObj, err := helper.Folders.Resource.Create(ctx, folder, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create folder")
folderUID := folderObj.GetUID()
// Verify folder is unmanaged
manager, found := folderObj.GetAnnotations()[utils.AnnoKeyManagerIdentity]
require.True(t, !found || manager == "", "folder should be unmanaged")
// Create unmanaged dashboard in the folder
dashboard := helper.LoadYAMLOrJSONFile("exportunifiedtorepository/dashboard-test-v1.yaml")
// Set folder UID in dashboard spec
err = unstructured.SetNestedField(dashboard.Object, string(folderUID), "spec", "folder")
require.NoError(t, err, "should be able to set folder UID")
dashboardObj, err := helper.DashboardsV1.Resource.Create(ctx, dashboard, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create dashboard in folder")
dashboardName := dashboardObj.GetName()
// Create repository with folder sync target (required for specific resource export)
const repo = "export-folder-structure-test-repo"
testRepo := TestRepo{
Name: repo,
Target: "folder",
Copies: map[string]string{},
ExpectedDashboards: 0,
ExpectedFolders: 0,
SkipResourceAssertions: true, // Skip assertions since we created folder and dashboard before repo
}
helper.CreateRepo(t, testRepo)
// Export dashboard (should preserve folder structure)
spec := provisioning.JobSpec{
Action: provisioning.JobActionPush,
Push: &provisioning.ExportJobOptions{
Path: "",
Resources: []provisioning.ResourceRef{
{
Name: dashboardName,
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
},
}
helper.TriggerJobAndWaitForSuccess(t, repo, spec)
// For folder sync targets with specific resource export, the folder structure
// from unmanaged folders should be preserved in the export path
// Expected: <provisioning_path>/<folder_name>/<dashboard>.json
expectedFile := filepath.Join(helper.ProvisioningPath, "Test Export Folder", "test-dashboard-created-at-v1.json")
body, err := os.ReadFile(expectedFile) //nolint:gosec
if err != nil {
// Fallback: if folder structure not preserved, file might be at root
expectedFile = filepath.Join(helper.ProvisioningPath, "test-dashboard-created-at-v1.json")
body, err = os.ReadFile(expectedFile) //nolint:gosec
require.NoError(t, err, "exported file should exist (either with folder structure or at root)")
t.Logf("Note: Dashboard exported to root instead of preserving folder structure")
}
obj := map[string]any{}
err = json.Unmarshal(body, &obj)
require.NoError(t, err, "exported file should be valid JSON")
val, _, err := unstructured.NestedString(obj, "metadata", "name")
require.NoError(t, err)
require.Equal(t, "test-v1", val)
}