From 70ee529341bfced739cb3cc2749c98aeef54f973 Mon Sep 17 00:00:00 2001 From: Ilya Kuznetsov Date: Wed, 14 Jan 2026 13:31:23 +0100 Subject: [PATCH 01/10] Command placeholder --- bundle/configsync/diff.go | 46 +++++++++++++++ bundle/configsync/format.go | 30 ++++++++++ bundle/configsync/output.go | 16 ++++++ bundle/configsync/yaml_generator.go | 18 ++++++ cmd/bundle/debug.go | 1 + cmd/bundle/debug/config_remote_sync.go | 80 ++++++++++++++++++++++++++ 6 files changed, 191 insertions(+) create mode 100644 bundle/configsync/diff.go create mode 100644 bundle/configsync/format.go create mode 100644 bundle/configsync/output.go create mode 100644 bundle/configsync/yaml_generator.go create mode 100644 cmd/bundle/debug/config_remote_sync.go diff --git a/bundle/configsync/diff.go b/bundle/configsync/diff.go new file mode 100644 index 0000000000..de80b5a12b --- /dev/null +++ b/bundle/configsync/diff.go @@ -0,0 +1,46 @@ +package configsync + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deployplan" + "github.com/databricks/cli/bundle/direct" + "github.com/databricks/cli/libs/log" +) + +// DetectChanges compares current remote state with the last deployed state +// and returns a map of resource changes. +func DetectChanges(ctx context.Context, b *bundle.Bundle) (map[string]deployplan.Changes, error) { + changes := make(map[string]deployplan.Changes) + + deployBundle := &direct.DeploymentBundle{} + // TODO: for Terraform engine we should read the state file, converted to direct state format, it should be created during deployment + _, statePath := b.StateFilenameDirect(ctx) + + plan, err := deployBundle.CalculatePlan(ctx, b.WorkspaceClient(), &b.Config, statePath) + if err != nil { + return nil, fmt.Errorf("failed to calculate plan: %w", err) + } + + for resourceKey, entry := range plan.Plan { + resourceChanges := make(deployplan.Changes) + + if entry.Changes != nil { + for path, changeDesc := range entry.Changes { + if changeDesc.Remote != nil && changeDesc.Action != deployplan.Skip { + resourceChanges[path] = changeDesc + } + } + } + + if len(resourceChanges) != 0 { + changes[resourceKey] = resourceChanges + } + + log.Debugf(ctx, "Resource %s has %d changes", resourceKey, len(resourceChanges)) + } + + return changes, nil +} diff --git a/bundle/configsync/format.go b/bundle/configsync/format.go new file mode 100644 index 0000000000..e4416e0c78 --- /dev/null +++ b/bundle/configsync/format.go @@ -0,0 +1,30 @@ +package configsync + +import ( + "fmt" + "strings" + + "github.com/databricks/cli/bundle/deployplan" +) + +// FormatTextOutput formats the config changes as human-readable text. 
Useful for debugging +func FormatTextOutput(changes map[string]deployplan.Changes) string { + var output strings.Builder + + if len(changes) == 0 { + output.WriteString("No changes detected.\n") + return output.String() + } + + output.WriteString(fmt.Sprintf("Detected changes in %d resource(s):\n\n", len(changes))) + + for resourceKey, resourceChanges := range changes { + output.WriteString(fmt.Sprintf("Resource: %s\n", resourceKey)) + + for path, changeDesc := range resourceChanges { + output.WriteString(fmt.Sprintf(" %s: %s\n", path, changeDesc.Action)) + } + } + + return output.String() +} diff --git a/bundle/configsync/output.go b/bundle/configsync/output.go new file mode 100644 index 0000000000..b69de0cd78 --- /dev/null +++ b/bundle/configsync/output.go @@ -0,0 +1,16 @@ +package configsync + +import "github.com/databricks/cli/bundle/deployplan" + +// FileChange represents a change to a bundle configuration file +type FileChange struct { + Path string `json:"path"` + OriginalContent string `json:"originalContent"` + ModifiedContent string `json:"modifiedContent"` +} + +// DiffOutput represents the complete output of the config-remote-sync command +type DiffOutput struct { + Files []FileChange `json:"files"` + Changes map[string]deployplan.Changes `json:"changes"` +} diff --git a/bundle/configsync/yaml_generator.go b/bundle/configsync/yaml_generator.go new file mode 100644 index 0000000000..7b563ea7a1 --- /dev/null +++ b/bundle/configsync/yaml_generator.go @@ -0,0 +1,18 @@ +package configsync + +import ( + "context" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deployplan" +) + +// GenerateYAMLFiles generates YAML files for the given changes. +func GenerateYAMLFiles(ctx context.Context, b *bundle.Bundle, changes map[string]deployplan.Changes) ([]FileChange, error) { + return nil, nil +} + +// SaveFiles writes all file changes to disk. +func SaveFiles(ctx context.Context, b *bundle.Bundle, files []FileChange) error { + return nil +} diff --git a/cmd/bundle/debug.go b/cmd/bundle/debug.go index b912e14fe2..f0bd6c83ed 100644 --- a/cmd/bundle/debug.go +++ b/cmd/bundle/debug.go @@ -16,5 +16,6 @@ func newDebugCommand() *cobra.Command { cmd.AddCommand(debug.NewTerraformCommand()) cmd.AddCommand(debug.NewRefSchemaCommand()) cmd.AddCommand(debug.NewStatesCommand()) + cmd.AddCommand(debug.NewConfigRemoteSyncCommand()) return cmd } diff --git a/cmd/bundle/debug/config_remote_sync.go b/cmd/bundle/debug/config_remote_sync.go new file mode 100644 index 0000000000..d2214abd2c --- /dev/null +++ b/cmd/bundle/debug/config_remote_sync.go @@ -0,0 +1,80 @@ +package debug + +import ( + "encoding/json" + "fmt" + + "github.com/databricks/cli/bundle/configsync" + "github.com/databricks/cli/cmd/bundle/utils" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/flags" + "github.com/spf13/cobra" +) + +func NewConfigRemoteSyncCommand() *cobra.Command { + var save bool + + cmd := &cobra.Command{ + Use: "config-remote-sync", + Short: "Sync remote resource changes to bundle configuration (experimental)", + Long: `Compares deployed state with current remote state and generates updated configuration files. + +When --save is specified, writes updated YAML files to disk. +Otherwise, outputs diff without modifying files. 
+ +Examples: + # Show diff without saving + databricks bundle debug config-remote-sync + + # Show diff and save to files + databricks bundle debug config-remote-sync --save`, + Hidden: true, // Used by DABs in the Workspace only + } + + cmd.Flags().BoolVar(&save, "save", false, "Write updated config files to disk") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { + b, _, err := utils.ProcessBundleRet(cmd, utils.ProcessOptions{}) + if err != nil { + return err + } + + ctx := cmd.Context() + changes, err := configsync.DetectChanges(ctx, b) + if err != nil { + return fmt.Errorf("failed to detect changes: %w", err) + } + + files, err := configsync.GenerateYAMLFiles(ctx, b, changes) + if err != nil { + return fmt.Errorf("failed to generate YAML files: %w", err) + } + + if save { + if err := configsync.SaveFiles(ctx, b, files); err != nil { + return fmt.Errorf("failed to save files: %w", err) + } + } + + result := []byte{} + if root.OutputType(cmd) == flags.OutputJSON { + diffOutput := &configsync.DiffOutput{ + Files: files, + Changes: changes, + } + result, err = json.MarshalIndent(diffOutput, "", " ") + if err != nil { + return fmt.Errorf("failed to marshal output: %w", err) + } + } else if root.OutputType(cmd) == flags.OutputText { + result = []byte(configsync.FormatTextOutput(changes)) + } + + out := cmd.OutOrStdout() + _, _ = out.Write(result) + _, _ = out.Write([]byte{'\n'}) + return nil + } + + return cmd +} From b9d9afcdae1208e6f963236803519bf085fdbb75 Mon Sep 17 00:00:00 2001 From: Ilya Kuznetsov Date: Wed, 14 Jan 2026 16:12:55 +0100 Subject: [PATCH 02/10] First iteration of YAML generation --- bundle/configsync/yaml_generator.go | 295 ++++++++++++- bundle/configsync/yaml_generator_test.go | 510 +++++++++++++++++++++++ cmd/bundle/debug/config_remote_sync.go | 2 +- 3 files changed, 805 insertions(+), 2 deletions(-) create mode 100644 bundle/configsync/yaml_generator_test.go diff --git a/bundle/configsync/yaml_generator.go b/bundle/configsync/yaml_generator.go index 7b563ea7a1..03af3c40e6 100644 --- a/bundle/configsync/yaml_generator.go +++ b/bundle/configsync/yaml_generator.go @@ -1,15 +1,308 @@ package configsync import ( + "bytes" "context" + "errors" + "fmt" + "os" + "strings" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/deployplan" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/yamlloader" + "github.com/databricks/cli/libs/log" + "github.com/databricks/cli/libs/structs/structpath" + "gopkg.in/yaml.v3" ) +// resourceKeyToDynPath converts a resource key to a dyn.Path +// Example: "resources.jobs.my_job" -> Path{Key("resources"), Key("jobs"), Key("my_job")} +func resourceKeyToDynPath(resourceKey string) (dyn.Path, error) { + if resourceKey == "" { + return nil, errors.New("invalid resource key: empty string") + } + + parts := strings.Split(resourceKey, ".") + if len(parts) == 0 { + return nil, fmt.Errorf("invalid resource key: %s", resourceKey) + } + + path := make(dyn.Path, len(parts)) + for i, part := range parts { + path[i] = dyn.Key(part) + } + + return path, nil +} + +// getResourceWithLocation retrieves a resource dyn.Value and its file location +// Uses the dynamic config value, not typed structures +func getResourceWithLocation(configValue dyn.Value, resourceKey string) (dyn.Value, dyn.Location, error) { + path, err := resourceKeyToDynPath(resourceKey) + if err != nil { + return dyn.NilValue, dyn.Location{}, err + } + + resource, err := dyn.GetByPath(configValue, path) + if err != nil { + return dyn.NilValue, 
dyn.Location{}, fmt.Errorf("resource %s not found: %w", resourceKey, err) + } + + return resource, resource.Location(), nil +} + +// structpathToDynPath converts a structpath string to a dyn.Path +// Example: "tasks[0].timeout_seconds" -> Path{Key("tasks"), Index(0), Key("timeout_seconds")} +// Also supports "tasks[task_key='my_task']" syntax for array element selection by field value +func structpathToDynPath(_ context.Context, pathStr string, baseValue dyn.Value) (dyn.Path, error) { + node, err := structpath.Parse(pathStr) + if err != nil { + return nil, fmt.Errorf("failed to parse path %s: %w", pathStr, err) + } + + // Convert PathNode linked list to slice for forward iteration + nodes := node.AsSlice() + + var dynPath dyn.Path + currentValue := baseValue + + for _, n := range nodes { + // Check for string key (field access) + if key, ok := n.StringKey(); ok { + dynPath = append(dynPath, dyn.Key(key)) + + // Update currentValue for next iteration + if currentValue.IsValid() { + currentValue, _ = dyn.GetByPath(currentValue, dyn.Path{dyn.Key(key)}) + } + continue + } + + // Check for numeric index + if idx, ok := n.Index(); ok { + dynPath = append(dynPath, dyn.Index(idx)) + + // Update currentValue for next iteration + if currentValue.IsValid() { + currentValue, _ = dyn.GetByPath(currentValue, dyn.Path{dyn.Index(idx)}) + } + continue + } + + // Check for key-value selector: [key='value'] + if key, value, ok := n.KeyValue(); ok { + // Need to search the array to find the matching index + if !currentValue.IsValid() || currentValue.Kind() != dyn.KindSequence { + return nil, fmt.Errorf("cannot apply [key='value'] selector to non-array value at path %s", dynPath.String()) + } + + seq, _ := currentValue.AsSequence() + foundIndex := -1 + + for i, elem := range seq { + keyValue, err := dyn.GetByPath(elem, dyn.Path{dyn.Key(key)}) + if err != nil { + continue + } + + // Compare the key value + if keyValue.Kind() == dyn.KindString && keyValue.MustString() == value { + foundIndex = i + break + } + } + + if foundIndex == -1 { + return nil, fmt.Errorf("no array element found with %s='%s' at path %s", key, value, dynPath.String()) + } + + dynPath = append(dynPath, dyn.Index(foundIndex)) + currentValue = seq[foundIndex] + continue + } + + // Skip wildcards or other special node types + if n.DotStar() || n.BracketStar() { + return nil, errors.New("wildcard patterns are not supported in field paths") + } + } + + return dynPath, nil +} + +// applyChanges applies all field changes to a resource dyn.Value +func applyChanges(ctx context.Context, resource dyn.Value, changes deployplan.Changes) (dyn.Value, error) { + result := resource + + for fieldPath, changeDesc := range changes { + // Skip if no remote value or action is Skip + if changeDesc.Remote == nil || changeDesc.Action == deployplan.Skip { + continue + } + + // Convert structpath to dyn.Path + dynPath, err := structpathToDynPath(ctx, fieldPath, result) + if err != nil { + log.Warnf(ctx, "Failed to parse field path %s: %v", fieldPath, err) + continue + } + + // Set the remote value at the path + remoteValue := dyn.V(changeDesc.Remote) + result, err = dyn.SetByPath(result, dynPath, remoteValue) + if err != nil { + log.Warnf(ctx, "Failed to set value at path %s: %v", fieldPath, err) + continue + } + } + + return result, nil +} + +// dynValueToYAML converts a dyn.Value to a YAML string +func dynValueToYAML(v dyn.Value) (string, error) { + var buf bytes.Buffer + enc := yaml.NewEncoder(&buf) + enc.SetIndent(2) + + if err := enc.Encode(v.AsAny()); err != nil 
{ + return "", err + } + + return buf.String(), nil +} + +// parseResourceKey extracts resource type and name from a resource key +// Example: "resources.jobs.my_job" -> type="jobs", name="my_job" +func parseResourceKey(resourceKey string) (resourceType, resourceName string, err error) { + parts := strings.Split(resourceKey, ".") + if len(parts) < 3 || parts[0] != "resources" { + return "", "", fmt.Errorf("invalid resource key format: %s (expected resources.TYPE.NAME)", resourceKey) + } + + return parts[1], parts[2], nil +} + +// findResourceInFile searches for a resource within a loaded file's dyn.Value +func findResourceInFile(_ context.Context, fileValue dyn.Value, resourceType, resourceName string) (dyn.Value, dyn.Path, error) { + // Try direct path first: resources.TYPE.NAME + directPath := dyn.Path{dyn.Key("resources"), dyn.Key(resourceType), dyn.Key(resourceName)} + resource, err := dyn.GetByPath(fileValue, directPath) + if err == nil { + return resource, directPath, nil + } + + // If not found, search with pattern for nested resources (e.g., in target overrides) + pattern := dyn.MustPatternFromString(fmt.Sprintf("**.resources.%s.%s", resourceType, resourceName)) + var foundResource dyn.Value + var foundPath dyn.Path + + _, _ = dyn.MapByPattern(fileValue, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + foundResource = v + foundPath = p + return v, nil + }) + + if foundResource.IsValid() { + return foundResource, foundPath, nil + } + + return dyn.NilValue, nil, fmt.Errorf("resource %s.%s not found in file", resourceType, resourceName) +} + // GenerateYAMLFiles generates YAML files for the given changes. func GenerateYAMLFiles(ctx context.Context, b *bundle.Bundle, changes map[string]deployplan.Changes) ([]FileChange, error) { - return nil, nil + // Get bundle config as dyn.Value + configValue := b.Config.Value() + + // Group changes by file path + fileChanges := make(map[string][]struct { + resourceKey string + changes deployplan.Changes + }) + + for resourceKey, resourceChanges := range changes { + // Get resource location from bundle config + _, loc, err := getResourceWithLocation(configValue, resourceKey) + if err != nil { + log.Warnf(ctx, "Failed to find resource %s in bundle config: %v", resourceKey, err) + continue + } + + filePath := loc.File + fileChanges[filePath] = append(fileChanges[filePath], struct { + resourceKey string + changes deployplan.Changes + }{resourceKey, resourceChanges}) + } + + // Process each file + var result []FileChange + + for filePath, resourcesInFile := range fileChanges { + // Load original file content + content, err := os.ReadFile(filePath) + if err != nil { + log.Warnf(ctx, "Failed to read file %s: %v", filePath, err) + continue + } + + // Load file as dyn.Value + fileValue, err := yamlloader.LoadYAML(filePath, bytes.NewBuffer(content)) + if err != nil { + log.Warnf(ctx, "Failed to parse YAML file %s: %v", filePath, err) + continue + } + + // Apply changes for each resource in this file + for _, item := range resourcesInFile { + // Parse resource key + resourceType, resourceName, err := parseResourceKey(item.resourceKey) + if err != nil { + log.Warnf(ctx, "Failed to parse resource key %s: %v", item.resourceKey, err) + continue + } + + // Find resource in loaded file + resource, resourcePath, err := findResourceInFile(ctx, fileValue, resourceType, resourceName) + if err != nil { + log.Warnf(ctx, "Failed to find resource %s in file %s: %v", item.resourceKey, filePath, err) + continue + } + + // Apply changes to the resource + 
modifiedResource, err := applyChanges(ctx, resource, item.changes) + if err != nil { + log.Warnf(ctx, "Failed to apply changes to resource %s: %v", item.resourceKey, err) + continue + } + + // Update the file's dyn.Value with modified resource + fileValue, err = dyn.SetByPath(fileValue, resourcePath, modifiedResource) + if err != nil { + log.Warnf(ctx, "Failed to update file value for resource %s: %v", item.resourceKey, err) + continue + } + } + + // Convert modified dyn.Value to YAML string + modifiedContent, err := dynValueToYAML(fileValue) + if err != nil { + log.Warnf(ctx, "Failed to convert modified value to YAML for file %s: %v", filePath, err) + continue + } + + // Create FileChange + result = append(result, FileChange{ + Path: filePath, + OriginalContent: string(content), + ModifiedContent: modifiedContent, + }) + } + + return result, nil } // SaveFiles writes all file changes to disk. diff --git a/bundle/configsync/yaml_generator_test.go b/bundle/configsync/yaml_generator_test.go new file mode 100644 index 0000000000..ac2618d70c --- /dev/null +++ b/bundle/configsync/yaml_generator_test.go @@ -0,0 +1,510 @@ +package configsync + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/deployplan" + "github.com/databricks/cli/libs/logdiag" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" +) + +func TestGenerateYAMLFiles_SimpleFieldChange(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + // Create a temporary directory for the bundle + tmpDir := t.TempDir() + + // Create a simple databricks.yml with a job + yamlContent := `resources: + jobs: + test_job: + name: "Test Job" + timeout_seconds: 3600 + tasks: + - task_key: "main_task" + notebook_task: + notebook_path: "/path/to/notebook" +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + // Load the bundle (pass directory, not file) + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + // Initialize the bundle config + mutator.DefaultMutators(ctx, b) + + // Create changes map + changes := map[string]deployplan.Changes{ + "resources.jobs.test_job": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 3600, + Remote: 7200, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + assert.Equal(t, yamlPath, fileChanges[0].Path) + assert.Contains(t, fileChanges[0].OriginalContent, "timeout_seconds: 3600") + assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 7200") + assert.NotContains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") +} + +func TestGenerateYAMLFiles_NestedFieldChange(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + // Create a temporary directory for the bundle + tmpDir := t.TempDir() + + // Create a simple databricks.yml with a job + yamlContent := `resources: + jobs: + test_job: + name: "Test Job" + tasks: + - task_key: "main_task" + notebook_task: + notebook_path: "/path/to/notebook" + timeout_seconds: 1800 +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + // Load the bundle (pass directory, not file) + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + 
+ // Initialize the bundle config + mutator.DefaultMutators(ctx, b) + + // Create changes map for nested field + changes := map[string]deployplan.Changes{ + "resources.jobs.test_job": { + "tasks[0].timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 1800, + Remote: 3600, + }, + }, + } + + // Generate YAML files + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + // Verify modified content contains the new value + assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") + + // Parse YAML to verify structure + var result map[string]any + err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) + require.NoError(t, err) + + // Navigate to verify the change + resources := result["resources"].(map[string]any) + jobs := resources["jobs"].(map[string]any) + testJob := jobs["test_job"].(map[string]any) + tasks := testJob["tasks"].([]any) + task0 := tasks[0].(map[string]any) + + assert.Equal(t, 3600, task0["timeout_seconds"]) +} + +func TestGenerateYAMLFiles_ArrayKeyValueAccess(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + // Create a temporary directory for the bundle + tmpDir := t.TempDir() + + // Create a simple databricks.yml with a job with multiple tasks + yamlContent := `resources: + jobs: + test_job: + name: "Test Job" + tasks: + - task_key: "setup_task" + notebook_task: + notebook_path: "/setup" + timeout_seconds: 600 + - task_key: "main_task" + notebook_task: + notebook_path: "/main" + timeout_seconds: 1800 +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + // Load the bundle (pass directory, not file) + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + // Initialize the bundle config + mutator.DefaultMutators(ctx, b) + + // Create changes map using key-value syntax + changes := map[string]deployplan.Changes{ + "resources.jobs.test_job": { + "tasks[task_key='main_task'].timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 1800, + Remote: 3600, + }, + }, + } + + // Generate YAML files + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + // Parse YAML to verify the correct task was updated + var result map[string]any + err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) + require.NoError(t, err) + + resources := result["resources"].(map[string]any) + jobs := resources["jobs"].(map[string]any) + testJob := jobs["test_job"].(map[string]any) + tasks := testJob["tasks"].([]any) + + // Verify setup_task (index 0) is unchanged + task0 := tasks[0].(map[string]any) + assert.Equal(t, "setup_task", task0["task_key"]) + assert.Equal(t, 600, task0["timeout_seconds"]) + + // Verify main_task (index 1) is updated + task1 := tasks[1].(map[string]any) + assert.Equal(t, "main_task", task1["task_key"]) + assert.Equal(t, 3600, task1["timeout_seconds"]) +} + +func TestGenerateYAMLFiles_MultipleResourcesSameFile(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + // Create a temporary directory for the bundle + tmpDir := t.TempDir() + + // Create databricks.yml with multiple jobs + yamlContent := `resources: + jobs: + job1: + name: "Job 1" + timeout_seconds: 3600 + job2: + name: "Job 2" + timeout_seconds: 1800 +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 
0o644) + require.NoError(t, err) + + // Load the bundle (pass directory, not file) + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + // Initialize the bundle config + mutator.DefaultMutators(ctx, b) + + // Create changes for both jobs + changes := map[string]deployplan.Changes{ + "resources.jobs.job1": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 3600, + Remote: 7200, + }, + }, + "resources.jobs.job2": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 1800, + Remote: 3600, + }, + }, + } + + // Generate YAML files + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + + // Should only have one FileChange since both resources are in the same file + require.Len(t, fileChanges, 1) + assert.Equal(t, yamlPath, fileChanges[0].Path) + + // Verify both changes are applied + assert.Contains(t, fileChanges[0].ModifiedContent, "job1") + assert.Contains(t, fileChanges[0].ModifiedContent, "job2") + + // Parse and verify both jobs are updated + var result map[string]any + err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) + require.NoError(t, err) + + resources := result["resources"].(map[string]any) + jobs := resources["jobs"].(map[string]any) + + job1 := jobs["job1"].(map[string]any) + assert.Equal(t, 7200, job1["timeout_seconds"]) + + job2 := jobs["job2"].(map[string]any) + assert.Equal(t, 3600, job2["timeout_seconds"]) +} + +func TestGenerateYAMLFiles_ResourceNotFound(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + // Create a temporary directory for the bundle + tmpDir := t.TempDir() + + // Create a simple databricks.yml + yamlContent := `resources: + jobs: + existing_job: + name: "Existing Job" +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + // Load the bundle (pass directory, not file) + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + // Initialize the bundle config + mutator.DefaultMutators(ctx, b) + + // Create changes for a non-existent resource + changes := map[string]deployplan.Changes{ + "resources.jobs.nonexistent_job": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Remote: 3600, + }, + }, + } + + // Generate YAML files - should not error, just skip the missing resource + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + + // Should return empty list since the resource was not found + assert.Len(t, fileChanges, 0) +} + +func TestGenerateYAMLFiles_InvalidFieldPath(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + // Create a temporary directory for the bundle + tmpDir := t.TempDir() + + // Create a simple databricks.yml + yamlContent := `resources: + jobs: + test_job: + name: "Test Job" + timeout_seconds: 3600 +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + // Load the bundle (pass directory, not file) + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + // Initialize the bundle config + mutator.DefaultMutators(ctx, b) + + // Create changes with invalid field path syntax + changes := map[string]deployplan.Changes{ + "resources.jobs.test_job": { + "invalid[[[path": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Remote: 7200, + }, + }, + } + + // Generate YAML files - should handle gracefully + fileChanges, err := 
GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + + // Should still return a FileChange, but the invalid field should be skipped + // The timeout_seconds value should remain unchanged + if len(fileChanges) > 0 { + assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") + + // Parse and verify structure is maintained + var result map[string]any + err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) + require.NoError(t, err) + + resources := result["resources"].(map[string]any) + jobs := resources["jobs"].(map[string]any) + testJob := jobs["test_job"].(map[string]any) + assert.Equal(t, 3600, testJob["timeout_seconds"]) + } +} + +func TestGenerateYAMLFiles_TargetOverride(t *testing.T) { + // Create a temporary directory for the bundle + tmpDir := t.TempDir() + + // Create main databricks.yml + mainYAML := `resources: + jobs: + main_job: + name: "Main Job" + timeout_seconds: 3600 +` + + mainPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(mainPath, []byte(mainYAML), 0o644) + require.NoError(t, err) + + // Create targets subdirectory + targetsDir := filepath.Join(tmpDir, "targets") + err = os.MkdirAll(targetsDir, 0o755) + require.NoError(t, err) + + // Create target override file + devYAML := `resources: + jobs: + dev_job: + name: "Dev Job" + timeout_seconds: 1800 +` + + devPath := filepath.Join(targetsDir, "dev.yml") + err = os.WriteFile(devPath, []byte(devYAML), 0o644) + require.NoError(t, err) + + // Create bundle config + bundleYAML := `bundle: + name: test-bundle + +include: + - "*.yml" + - "targets/*.yml" + +targets: + dev: + resources: + jobs: + dev_job: + name: "Dev Job Override" +` + + bundlePath := filepath.Join(tmpDir, "databricks.yml") + err = os.WriteFile(bundlePath, []byte(bundleYAML), 0o644) + require.NoError(t, err) + + // Note: This test may need adjustment based on how bundle loading handles includes + // For now, we test with a simpler scenario + + t.Skip("Skipping target override test - requires more complex bundle setup with includes") +} + +func TestResourceKeyToDynPath(t *testing.T) { + tests := []struct { + name string + resourceKey string + wantErr bool + wantLen int + }{ + { + name: "simple resource key", + resourceKey: "resources.jobs.my_job", + wantErr: false, + wantLen: 3, + }, + { + name: "empty resource key", + resourceKey: "", + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + path, err := resourceKeyToDynPath(tt.resourceKey) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Len(t, path, tt.wantLen) + } + }) + } +} + +func TestParseResourceKey(t *testing.T) { + tests := []struct { + name string + resourceKey string + wantType string + wantName string + wantErr bool + }{ + { + name: "valid job resource", + resourceKey: "resources.jobs.my_job", + wantType: "jobs", + wantName: "my_job", + wantErr: false, + }, + { + name: "valid pipeline resource", + resourceKey: "resources.pipelines.my_pipeline", + wantType: "pipelines", + wantName: "my_pipeline", + wantErr: false, + }, + { + name: "invalid format - too few parts", + resourceKey: "resources.jobs", + wantErr: true, + }, + { + name: "invalid format - wrong prefix", + resourceKey: "targets.jobs.my_job", + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resourceType, resourceName, err := parseResourceKey(tt.resourceKey) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, 
tt.wantType, resourceType) + assert.Equal(t, tt.wantName, resourceName) + } + }) + } +} diff --git a/cmd/bundle/debug/config_remote_sync.go b/cmd/bundle/debug/config_remote_sync.go index d2214abd2c..b5e4bec503 100644 --- a/cmd/bundle/debug/config_remote_sync.go +++ b/cmd/bundle/debug/config_remote_sync.go @@ -56,7 +56,7 @@ Examples: } } - result := []byte{} + var result []byte if root.OutputType(cmd) == flags.OutputJSON { diffOutput := &configsync.DiffOutput{ Files: files, From ef550909377d06a6240d1bdbb925f66264defa90 Mon Sep 17 00:00:00 2001 From: Ilya Kuznetsov Date: Wed, 14 Jan 2026 17:24:26 +0100 Subject: [PATCH 03/10] File writer --- bundle/configsync/output.go | 25 +++++++- bundle/configsync/output_test.go | 89 +++++++++++++++++++++++++++++ bundle/configsync/yaml_generator.go | 6 -- 3 files changed, 113 insertions(+), 7 deletions(-) create mode 100644 bundle/configsync/output_test.go diff --git a/bundle/configsync/output.go b/bundle/configsync/output.go index b69de0cd78..fad2fd7636 100644 --- a/bundle/configsync/output.go +++ b/bundle/configsync/output.go @@ -1,6 +1,13 @@ package configsync -import "github.com/databricks/cli/bundle/deployplan" +import ( + "context" + "os" + "path/filepath" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deployplan" +) // FileChange represents a change to a bundle configuration file type FileChange struct { @@ -14,3 +21,19 @@ type DiffOutput struct { Files []FileChange `json:"files"` Changes map[string]deployplan.Changes `json:"changes"` } + +// SaveFiles writes all file changes to disk. +func SaveFiles(ctx context.Context, b *bundle.Bundle, files []FileChange) error { + for _, file := range files { + err := os.MkdirAll(filepath.Dir(file.Path), 0o755) + if err != nil { + return err + } + + err = os.WriteFile(file.Path, []byte(file.ModifiedContent), 0o644) + if err != nil { + return err + } + } + return nil +} diff --git a/bundle/configsync/output_test.go b/bundle/configsync/output_test.go new file mode 100644 index 0000000000..1b35b807d8 --- /dev/null +++ b/bundle/configsync/output_test.go @@ -0,0 +1,89 @@ +package configsync + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSaveFiles_Success(t *testing.T) { + ctx := context.Background() + + tmpDir := t.TempDir() + + yamlPath := filepath.Join(tmpDir, "subdir", "databricks.yml") + modifiedContent := `resources: + jobs: + test_job: + name: "Updated Job" + timeout_seconds: 7200 +` + + files := []FileChange{ + { + Path: yamlPath, + OriginalContent: "original content", + ModifiedContent: modifiedContent, + }, + } + + err := SaveFiles(ctx, &bundle.Bundle{}, files) + require.NoError(t, err) + + _, err = os.Stat(yamlPath) + require.NoError(t, err) + + content, err := os.ReadFile(yamlPath) + require.NoError(t, err) + assert.Equal(t, modifiedContent, string(content)) + + _, err = os.Stat(filepath.Dir(yamlPath)) + require.NoError(t, err) +} + +func TestSaveFiles_MultipleFiles(t *testing.T) { + ctx := context.Background() + + tmpDir := t.TempDir() + + file1Path := filepath.Join(tmpDir, "file1.yml") + file2Path := filepath.Join(tmpDir, "subdir", "file2.yml") + content1 := "content for file 1" + content2 := "content for file 2" + + files := []FileChange{ + { + Path: file1Path, + OriginalContent: "original 1", + ModifiedContent: content1, + }, + { + Path: file2Path, + OriginalContent: "original 2", + ModifiedContent: content2, + }, + } + + err := 
SaveFiles(ctx, &bundle.Bundle{}, files) + require.NoError(t, err) + + content, err := os.ReadFile(file1Path) + require.NoError(t, err) + assert.Equal(t, content1, string(content)) + + content, err = os.ReadFile(file2Path) + require.NoError(t, err) + assert.Equal(t, content2, string(content)) +} + +func TestSaveFiles_EmptyList(t *testing.T) { + ctx := context.Background() + + err := SaveFiles(ctx, &bundle.Bundle{}, []FileChange{}) + require.NoError(t, err) +} diff --git a/bundle/configsync/yaml_generator.go b/bundle/configsync/yaml_generator.go index 03af3c40e6..5711f7df6a 100644 --- a/bundle/configsync/yaml_generator.go +++ b/bundle/configsync/yaml_generator.go @@ -294,7 +294,6 @@ func GenerateYAMLFiles(ctx context.Context, b *bundle.Bundle, changes map[string continue } - // Create FileChange result = append(result, FileChange{ Path: filePath, OriginalContent: string(content), @@ -304,8 +303,3 @@ func GenerateYAMLFiles(ctx context.Context, b *bundle.Bundle, changes map[string return result, nil } - -// SaveFiles writes all file changes to disk. -func SaveFiles(ctx context.Context, b *bundle.Bundle, files []FileChange) error { - return nil -} From 1bf39c9f1d161c96c15417458db9d351a9e77a6f Mon Sep 17 00:00:00 2001 From: Ilya Kuznetsov Date: Wed, 14 Jan 2026 18:04:36 +0100 Subject: [PATCH 04/10] Target overrides --- bundle/configsync/yaml_generator.go | 22 +++++- bundle/configsync/yaml_generator_test.go | 93 +++++++++++++++++++----- 2 files changed, 94 insertions(+), 21 deletions(-) diff --git a/bundle/configsync/yaml_generator.go b/bundle/configsync/yaml_generator.go index 5711f7df6a..a4ac1576fc 100644 --- a/bundle/configsync/yaml_generator.go +++ b/bundle/configsync/yaml_generator.go @@ -194,7 +194,27 @@ func findResourceInFile(_ context.Context, fileValue dyn.Value, resourceType, re return resource, directPath, nil } - // If not found, search with pattern for nested resources (e.g., in target overrides) + // Check if there's a targets section and search within each target + targetsValue, err := dyn.GetByPath(fileValue, dyn.Path{dyn.Key("targets")}) + if err == nil && targetsValue.Kind() == dyn.KindMap { + targetsMap := targetsValue.MustMap() + for _, pair := range targetsMap.Pairs() { + targetName := pair.Key.MustString() + targetPath := dyn.Path{ + dyn.Key("targets"), + dyn.Key(targetName), + dyn.Key("resources"), + dyn.Key(resourceType), + dyn.Key(resourceName), + } + resource, err := dyn.GetByPath(fileValue, targetPath) + if err == nil { + return resource, targetPath, nil + } + } + } + + // If not found, search with pattern for nested resources (e.g., in includes) pattern := dyn.MustPatternFromString(fmt.Sprintf("**.resources.%s.%s", resourceType, resourceName)) var foundResource dyn.Value var foundPath dyn.Path diff --git a/bundle/configsync/yaml_generator_test.go b/bundle/configsync/yaml_generator_test.go index ac2618d70c..a6a66103f0 100644 --- a/bundle/configsync/yaml_generator_test.go +++ b/bundle/configsync/yaml_generator_test.go @@ -369,16 +369,18 @@ func TestGenerateYAMLFiles_InvalidFieldPath(t *testing.T) { } } -func TestGenerateYAMLFiles_TargetOverride(t *testing.T) { +func TestGenerateYAMLFiles_Include(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + // Create a temporary directory for the bundle tmpDir := t.TempDir() - // Create main databricks.yml - mainYAML := `resources: - jobs: - main_job: - name: "Main Job" - timeout_seconds: 3600 + // Create main databricks.yml with bundle config and includes + mainYAML := `bundle: + name: test-bundle + +include: + - 
"targets/*.yml" ` mainPath := filepath.Join(tmpDir, "databricks.yml") @@ -390,7 +392,7 @@ func TestGenerateYAMLFiles_TargetOverride(t *testing.T) { err = os.MkdirAll(targetsDir, 0o755) require.NoError(t, err) - // Create target override file + // Create included file with dev_job resource devYAML := `resources: jobs: dev_job: @@ -402,30 +404,81 @@ func TestGenerateYAMLFiles_TargetOverride(t *testing.T) { err = os.WriteFile(devPath, []byte(devYAML), 0o644) require.NoError(t, err) - // Create bundle config - bundleYAML := `bundle: - name: test-bundle + // Load the bundle + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) -include: - - "*.yml" - - "targets/*.yml" + // Process includes and other default mutators + mutator.DefaultMutators(ctx, b) + + // Create changes for the dev_job (which was defined in included file) + changes := map[string]deployplan.Changes{ + "resources.jobs.dev_job": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 1800, + Remote: 3600, + }, + }, + } + + // Generate YAML files + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + // Verify changes are written to targets/dev.yml (where resource was defined) + assert.Equal(t, devPath, fileChanges[0].Path) + assert.Contains(t, fileChanges[0].OriginalContent, "timeout_seconds: 1800") + assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") + assert.NotContains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 1800") +} + +func TestGenerateYAMLFiles_TargetOverride(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + tmpDir := t.TempDir() + + mainYAML := `bundle: + name: test-bundle targets: dev: resources: jobs: dev_job: - name: "Dev Job Override" + name: "Dev Job" + timeout_seconds: 1800 ` - bundlePath := filepath.Join(tmpDir, "databricks.yml") - err = os.WriteFile(bundlePath, []byte(bundleYAML), 0o644) + mainPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(mainPath, []byte(mainYAML), 0o644) require.NoError(t, err) - // Note: This test may need adjustment based on how bundle loading handles includes - // For now, we test with a simpler scenario + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + diags := bundle.Apply(ctx, b, mutator.SelectTarget("dev")) + require.NoError(t, diags.Error()) - t.Skip("Skipping target override test - requires more complex bundle setup with includes") + changes := map[string]deployplan.Changes{ + "resources.jobs.dev_job": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 1800, + Remote: 3600, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + assert.Equal(t, mainPath, fileChanges[0].Path) + assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") } func TestResourceKeyToDynPath(t *testing.T) { From afc2e87ca27ecd4eb664875697b2e3ef9e94c993 Mon Sep 17 00:00:00 2001 From: Ilya Kuznetsov Date: Wed, 14 Jan 2026 18:19:50 +0100 Subject: [PATCH 05/10] Cleanup --- bundle/configsync/yaml_generator.go | 61 ++++++----------------------- 1 file changed, 13 insertions(+), 48 deletions(-) diff --git a/bundle/configsync/yaml_generator.go b/bundle/configsync/yaml_generator.go index a4ac1576fc..f7e541d6ea 100644 --- a/bundle/configsync/yaml_generator.go +++ b/bundle/configsync/yaml_generator.go @@ -62,7 +62,6 @@ func structpathToDynPath(_ context.Context, 
pathStr string, baseValue dyn.Value) return nil, fmt.Errorf("failed to parse path %s: %w", pathStr, err) } - // Convert PathNode linked list to slice for forward iteration nodes := node.AsSlice() var dynPath dyn.Path @@ -137,11 +136,6 @@ func applyChanges(ctx context.Context, resource dyn.Value, changes deployplan.Ch result := resource for fieldPath, changeDesc := range changes { - // Skip if no remote value or action is Skip - if changeDesc.Remote == nil || changeDesc.Action == deployplan.Skip { - continue - } - // Convert structpath to dyn.Path dynPath, err := structpathToDynPath(ctx, fieldPath, result) if err != nil { @@ -186,47 +180,23 @@ func parseResourceKey(resourceKey string) (resourceType, resourceName string, er } // findResourceInFile searches for a resource within a loaded file's dyn.Value -func findResourceInFile(_ context.Context, fileValue dyn.Value, resourceType, resourceName string) (dyn.Value, dyn.Path, error) { - // Try direct path first: resources.TYPE.NAME - directPath := dyn.Path{dyn.Key("resources"), dyn.Key(resourceType), dyn.Key(resourceName)} - resource, err := dyn.GetByPath(fileValue, directPath) - if err == nil { - return resource, directPath, nil +func findResourceInFile(_ context.Context, fileValue dyn.Value, resourceType, resourceName, targetName string) (dyn.Value, dyn.Path, error) { + patternsToCheck := []dyn.Path{ + {dyn.Key("targets"), dyn.Key(targetName), dyn.Key("resources"), dyn.Key(resourceType), dyn.Key(resourceName)}, + {dyn.Key("resources"), dyn.Key(resourceType), dyn.Key(resourceName)}, } - // Check if there's a targets section and search within each target - targetsValue, err := dyn.GetByPath(fileValue, dyn.Path{dyn.Key("targets")}) - if err == nil && targetsValue.Kind() == dyn.KindMap { - targetsMap := targetsValue.MustMap() - for _, pair := range targetsMap.Pairs() { - targetName := pair.Key.MustString() - targetPath := dyn.Path{ - dyn.Key("targets"), - dyn.Key(targetName), - dyn.Key("resources"), - dyn.Key(resourceType), - dyn.Key(resourceName), - } - resource, err := dyn.GetByPath(fileValue, targetPath) - if err == nil { - return resource, targetPath, nil - } + for _, pattern := range patternsToCheck { + resource, err := dyn.GetByPath(fileValue, pattern) + if err == nil { + return resource, pattern, nil } } - // If not found, search with pattern for nested resources (e.g., in includes) - pattern := dyn.MustPatternFromString(fmt.Sprintf("**.resources.%s.%s", resourceType, resourceName)) - var foundResource dyn.Value - var foundPath dyn.Path - - _, _ = dyn.MapByPattern(fileValue, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { - foundResource = v - foundPath = p - return v, nil - }) - - if foundResource.IsValid() { - return foundResource, foundPath, nil + directPath := dyn.Path{dyn.Key("resources"), dyn.Key(resourceType), dyn.Key(resourceName)} + resource, err := dyn.GetByPath(fileValue, directPath) + if err == nil { + return resource, directPath, nil } return dyn.NilValue, nil, fmt.Errorf("resource %s.%s not found in file", resourceType, resourceName) @@ -234,17 +204,14 @@ func findResourceInFile(_ context.Context, fileValue dyn.Value, resourceType, re // GenerateYAMLFiles generates YAML files for the given changes. 
func GenerateYAMLFiles(ctx context.Context, b *bundle.Bundle, changes map[string]deployplan.Changes) ([]FileChange, error) { - // Get bundle config as dyn.Value configValue := b.Config.Value() - // Group changes by file path fileChanges := make(map[string][]struct { resourceKey string changes deployplan.Changes }) for resourceKey, resourceChanges := range changes { - // Get resource location from bundle config _, loc, err := getResourceWithLocation(configValue, resourceKey) if err != nil { log.Warnf(ctx, "Failed to find resource %s in bundle config: %v", resourceKey, err) @@ -258,11 +225,9 @@ func GenerateYAMLFiles(ctx context.Context, b *bundle.Bundle, changes map[string }{resourceKey, resourceChanges}) } - // Process each file var result []FileChange for filePath, resourcesInFile := range fileChanges { - // Load original file content content, err := os.ReadFile(filePath) if err != nil { log.Warnf(ctx, "Failed to read file %s: %v", filePath, err) @@ -286,7 +251,7 @@ func GenerateYAMLFiles(ctx context.Context, b *bundle.Bundle, changes map[string } // Find resource in loaded file - resource, resourcePath, err := findResourceInFile(ctx, fileValue, resourceType, resourceName) + resource, resourcePath, err := findResourceInFile(ctx, fileValue, resourceType, resourceName, b.Config.Bundle.Target) if err != nil { log.Warnf(ctx, "Failed to find resource %s in file %s: %v", item.resourceKey, filePath, err) continue From beba54d50b7c697583edfde8f0662cb083daa8ab Mon Sep 17 00:00:00 2001 From: Ilya Kuznetsov Date: Thu, 15 Jan 2026 11:11:24 +0100 Subject: [PATCH 06/10] Fix invalid Dyn panic --- bundle/configsync/yaml_generator.go | 13 +- bundle/configsync/yaml_generator_test.go | 184 +++++++++++++++++++++++ 2 files changed, 194 insertions(+), 3 deletions(-) diff --git a/bundle/configsync/yaml_generator.go b/bundle/configsync/yaml_generator.go index f7e541d6ea..54980ac866 100644 --- a/bundle/configsync/yaml_generator.go +++ b/bundle/configsync/yaml_generator.go @@ -11,6 +11,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/convert" "github.com/databricks/cli/libs/dyn/yamlloader" "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/structs/structpath" @@ -143,13 +144,19 @@ func applyChanges(ctx context.Context, resource dyn.Value, changes deployplan.Ch continue } - // Set the remote value at the path - remoteValue := dyn.V(changeDesc.Remote) - result, err = dyn.SetByPath(result, dynPath, remoteValue) + // Convert remote value to dyn.Value, handling custom types like enums + remoteValue, err := convert.FromTyped(changeDesc.Remote, dyn.NilValue) + if err != nil { + log.Warnf(ctx, "Failed to convert remote value at path %s: %v", fieldPath, err) + continue + } + + newResult, err := dyn.SetByPath(result, dynPath, remoteValue) if err != nil { log.Warnf(ctx, "Failed to set value at path %s: %v", fieldPath, err) continue } + result = newResult } return result, nil diff --git a/bundle/configsync/yaml_generator_test.go b/bundle/configsync/yaml_generator_test.go index a6a66103f0..22b78ca915 100644 --- a/bundle/configsync/yaml_generator_test.go +++ b/bundle/configsync/yaml_generator_test.go @@ -9,7 +9,9 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/deployplan" + "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/logdiag" + "github.com/databricks/databricks-sdk-go/service/jobs" 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" @@ -561,3 +563,185 @@ func TestParseResourceKey(t *testing.T) { }) } } + +func TestApplyChangesWithEnumTypes(t *testing.T) { + ctx := context.Background() + + resource := dyn.V(map[string]dyn.Value{ + "edit_mode": dyn.V("EDITABLE"), + "name": dyn.V("test_job"), + }) + + changes := deployplan.Changes{ + "edit_mode": &deployplan.ChangeDesc{ + Remote: jobs.JobEditModeUiLocked, + }, + } + + result, err := applyChanges(ctx, resource, changes) + require.NoError(t, err) + + editMode, err := dyn.GetByPath(result, dyn.Path{dyn.Key("edit_mode")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindString, editMode.Kind()) + assert.Equal(t, "UI_LOCKED", editMode.MustString()) +} + +func TestApplyChangesWithPrimitiveTypes(t *testing.T) { + ctx := context.Background() + + resource := dyn.V(map[string]dyn.Value{ + "name": dyn.V("old_name"), + "timeout": dyn.V(100), + "enabled": dyn.V(false), + "max_retries": dyn.V(1.5), + }) + + changes := deployplan.Changes{ + "name": &deployplan.ChangeDesc{ + Remote: "new_name", + }, + "timeout": &deployplan.ChangeDesc{ + Remote: int64(200), + }, + "enabled": &deployplan.ChangeDesc{ + Remote: true, + }, + "max_retries": &deployplan.ChangeDesc{ + Remote: 2.5, + }, + } + + result, err := applyChanges(ctx, resource, changes) + require.NoError(t, err) + + name, err := dyn.GetByPath(result, dyn.Path{dyn.Key("name")}) + require.NoError(t, err) + assert.Equal(t, "new_name", name.MustString()) + + timeout, err := dyn.GetByPath(result, dyn.Path{dyn.Key("timeout")}) + require.NoError(t, err) + assert.Equal(t, int64(200), timeout.MustInt()) + + enabled, err := dyn.GetByPath(result, dyn.Path{dyn.Key("enabled")}) + require.NoError(t, err) + assert.True(t, enabled.MustBool()) + + maxRetries, err := dyn.GetByPath(result, dyn.Path{dyn.Key("max_retries")}) + require.NoError(t, err) + assert.InDelta(t, 2.5, maxRetries.MustFloat(), 0.001) +} + +func TestApplyChangesWithNilValues(t *testing.T) { + ctx := context.Background() + + resource := dyn.V(map[string]dyn.Value{ + "name": dyn.V("test_job"), + "description": dyn.V("some description"), + }) + + changes := deployplan.Changes{ + "description": &deployplan.ChangeDesc{ + Remote: nil, + }, + } + + result, err := applyChanges(ctx, resource, changes) + require.NoError(t, err) + + description, err := dyn.GetByPath(result, dyn.Path{dyn.Key("description")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindNil, description.Kind()) +} + +func TestApplyChangesWithStructValues(t *testing.T) { + ctx := context.Background() + + resource := dyn.V(map[string]dyn.Value{ + "name": dyn.V("test_job"), + "settings": dyn.V(map[string]dyn.Value{ + "timeout": dyn.V(100), + }), + }) + + type Settings struct { + Timeout int64 `json:"timeout"` + MaxRetries *int64 `json:"max_retries,omitempty"` + } + + maxRetries := int64(3) + changes := deployplan.Changes{ + "settings": &deployplan.ChangeDesc{ + Remote: &Settings{ + Timeout: 200, + MaxRetries: &maxRetries, + }, + }, + } + + result, err := applyChanges(ctx, resource, changes) + require.NoError(t, err) + + settings, err := dyn.GetByPath(result, dyn.Path{dyn.Key("settings")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindMap, settings.Kind()) + + timeout, err := dyn.GetByPath(settings, dyn.Path{dyn.Key("timeout")}) + require.NoError(t, err) + assert.Equal(t, int64(200), timeout.MustInt()) + + retriesVal, err := dyn.GetByPath(settings, dyn.Path{dyn.Key("max_retries")}) + require.NoError(t, err) + assert.Equal(t, 
int64(3), retriesVal.MustInt()) +} + +func TestGenerateYAMLFiles_WithEnumValues(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + tmpDir := t.TempDir() + + yamlContent := `resources: + jobs: + test_job: + name: "Test Job" + edit_mode: "EDITABLE" + timeout_seconds: 3600 +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + changes := map[string]deployplan.Changes{ + "resources.jobs.test_job": { + "edit_mode": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: "EDITABLE", + Remote: jobs.JobEditModeUiLocked, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + assert.Equal(t, yamlPath, fileChanges[0].Path) + assert.Contains(t, fileChanges[0].OriginalContent, "edit_mode: \"EDITABLE\"") + assert.Contains(t, fileChanges[0].ModifiedContent, "edit_mode: UI_LOCKED") + assert.NotContains(t, fileChanges[0].ModifiedContent, "edit_mode: EDITABLE") + + var result map[string]any + err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) + require.NoError(t, err) + + resources := result["resources"].(map[string]any) + jobs := resources["jobs"].(map[string]any) + testJob := jobs["test_job"].(map[string]any) + assert.Equal(t, "UI_LOCKED", testJob["edit_mode"]) +} From d8fac50eef11cba327f076187da66c6ec937a6b4 Mon Sep 17 00:00:00 2001 From: Ilya Kuznetsov Date: Thu, 15 Jan 2026 11:18:56 +0100 Subject: [PATCH 07/10] Fix test to use structs --- bundle/configsync/yaml_generator_test.go | 128 ++++++++++++++--------- 1 file changed, 77 insertions(+), 51 deletions(-) diff --git a/bundle/configsync/yaml_generator_test.go b/bundle/configsync/yaml_generator_test.go index 22b78ca915..281c5c9f89 100644 --- a/bundle/configsync/yaml_generator_test.go +++ b/bundle/configsync/yaml_generator_test.go @@ -483,6 +483,83 @@ targets: assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") } +func TestGenerateYAMLFiles_WithStructValues(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + tmpDir := t.TempDir() + + yamlContent := `resources: + jobs: + test_job: + name: "Test Job" + timeout_seconds: 3600 + email_notifications: + on_success: + - old@example.com +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + type EmailNotifications struct { + OnSuccess []string `json:"on_success,omitempty" yaml:"on_success,omitempty"` + OnFailure []string `json:"on_failure,omitempty" yaml:"on_failure,omitempty"` + } + + changes := map[string]deployplan.Changes{ + "resources.jobs.test_job": { + "email_notifications": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Remote: &EmailNotifications{ + OnSuccess: []string{"success@example.com"}, + OnFailure: []string{"failure@example.com"}, + }, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + assert.Equal(t, yamlPath, fileChanges[0].Path) + assert.Contains(t, fileChanges[0].OriginalContent, "on_success:") + assert.Contains(t, fileChanges[0].OriginalContent, "old@example.com") + assert.Contains(t, fileChanges[0].ModifiedContent, "success@example.com") + 
assert.Contains(t, fileChanges[0].ModifiedContent, "failure@example.com") + + type JobsConfig struct { + Name string `yaml:"name"` + TimeoutSeconds int `yaml:"timeout_seconds"` + EmailNotifications *EmailNotifications `yaml:"email_notifications,omitempty"` + } + + type ResourcesConfig struct { + Jobs map[string]JobsConfig `yaml:"jobs"` + } + + type RootConfig struct { + Resources ResourcesConfig `yaml:"resources"` + } + + var result RootConfig + err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) + require.NoError(t, err) + + testJob := result.Resources.Jobs["test_job"] + assert.Equal(t, "Test Job", testJob.Name) + assert.Equal(t, 3600, testJob.TimeoutSeconds) + require.NotNil(t, testJob.EmailNotifications) + assert.Equal(t, []string{"success@example.com"}, testJob.EmailNotifications.OnSuccess) + assert.Equal(t, []string{"failure@example.com"}, testJob.EmailNotifications.OnFailure) +} + func TestResourceKeyToDynPath(t *testing.T) { tests := []struct { name string @@ -694,54 +771,3 @@ func TestApplyChangesWithStructValues(t *testing.T) { require.NoError(t, err) assert.Equal(t, int64(3), retriesVal.MustInt()) } - -func TestGenerateYAMLFiles_WithEnumValues(t *testing.T) { - ctx := logdiag.InitContext(context.Background()) - - tmpDir := t.TempDir() - - yamlContent := `resources: - jobs: - test_job: - name: "Test Job" - edit_mode: "EDITABLE" - timeout_seconds: 3600 -` - - yamlPath := filepath.Join(tmpDir, "databricks.yml") - err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) - require.NoError(t, err) - - b, err := bundle.Load(ctx, tmpDir) - require.NoError(t, err) - - mutator.DefaultMutators(ctx, b) - - changes := map[string]deployplan.Changes{ - "resources.jobs.test_job": { - "edit_mode": &deployplan.ChangeDesc{ - Action: deployplan.Update, - Old: "EDITABLE", - Remote: jobs.JobEditModeUiLocked, - }, - }, - } - - fileChanges, err := GenerateYAMLFiles(ctx, b, changes) - require.NoError(t, err) - require.Len(t, fileChanges, 1) - - assert.Equal(t, yamlPath, fileChanges[0].Path) - assert.Contains(t, fileChanges[0].OriginalContent, "edit_mode: \"EDITABLE\"") - assert.Contains(t, fileChanges[0].ModifiedContent, "edit_mode: UI_LOCKED") - assert.NotContains(t, fileChanges[0].ModifiedContent, "edit_mode: EDITABLE") - - var result map[string]any - err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) - require.NoError(t, err) - - resources := result["resources"].(map[string]any) - jobs := resources["jobs"].(map[string]any) - testJob := jobs["test_job"].(map[string]any) - assert.Equal(t, "UI_LOCKED", testJob["edit_mode"]) -} From e0bb6b1cdbed48ec176438e4ae00683ff4440d1a Mon Sep 17 00:00:00 2001 From: Ilya Kuznetsov Date: Thu, 15 Jan 2026 11:21:38 +0100 Subject: [PATCH 08/10] Cleanup --- bundle/configsync/yaml_generator_test.go | 57 ------------------------ 1 file changed, 57 deletions(-) diff --git a/bundle/configsync/yaml_generator_test.go b/bundle/configsync/yaml_generator_test.go index 281c5c9f89..12278653ee 100644 --- a/bundle/configsync/yaml_generator_test.go +++ b/bundle/configsync/yaml_generator_test.go @@ -20,10 +20,8 @@ import ( func TestGenerateYAMLFiles_SimpleFieldChange(t *testing.T) { ctx := logdiag.InitContext(context.Background()) - // Create a temporary directory for the bundle tmpDir := t.TempDir() - // Create a simple databricks.yml with a job yamlContent := `resources: jobs: test_job: @@ -39,14 +37,11 @@ func TestGenerateYAMLFiles_SimpleFieldChange(t *testing.T) { err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) require.NoError(t, err) - 
// Load the bundle (pass directory, not file) b, err := bundle.Load(ctx, tmpDir) require.NoError(t, err) - // Initialize the bundle config mutator.DefaultMutators(ctx, b) - // Create changes map changes := map[string]deployplan.Changes{ "resources.jobs.test_job": { "timeout_seconds": &deployplan.ChangeDesc{ @@ -70,10 +65,8 @@ func TestGenerateYAMLFiles_SimpleFieldChange(t *testing.T) { func TestGenerateYAMLFiles_NestedFieldChange(t *testing.T) { ctx := logdiag.InitContext(context.Background()) - // Create a temporary directory for the bundle tmpDir := t.TempDir() - // Create a simple databricks.yml with a job yamlContent := `resources: jobs: test_job: @@ -89,14 +82,11 @@ func TestGenerateYAMLFiles_NestedFieldChange(t *testing.T) { err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) require.NoError(t, err) - // Load the bundle (pass directory, not file) b, err := bundle.Load(ctx, tmpDir) require.NoError(t, err) - // Initialize the bundle config mutator.DefaultMutators(ctx, b) - // Create changes map for nested field changes := map[string]deployplan.Changes{ "resources.jobs.test_job": { "tasks[0].timeout_seconds": &deployplan.ChangeDesc{ @@ -107,20 +97,16 @@ func TestGenerateYAMLFiles_NestedFieldChange(t *testing.T) { }, } - // Generate YAML files fileChanges, err := GenerateYAMLFiles(ctx, b, changes) require.NoError(t, err) require.Len(t, fileChanges, 1) - // Verify modified content contains the new value assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") - // Parse YAML to verify structure var result map[string]any err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) require.NoError(t, err) - // Navigate to verify the change resources := result["resources"].(map[string]any) jobs := resources["jobs"].(map[string]any) testJob := jobs["test_job"].(map[string]any) @@ -133,10 +119,8 @@ func TestGenerateYAMLFiles_NestedFieldChange(t *testing.T) { func TestGenerateYAMLFiles_ArrayKeyValueAccess(t *testing.T) { ctx := logdiag.InitContext(context.Background()) - // Create a temporary directory for the bundle tmpDir := t.TempDir() - // Create a simple databricks.yml with a job with multiple tasks yamlContent := `resources: jobs: test_job: @@ -156,14 +140,11 @@ func TestGenerateYAMLFiles_ArrayKeyValueAccess(t *testing.T) { err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) require.NoError(t, err) - // Load the bundle (pass directory, not file) b, err := bundle.Load(ctx, tmpDir) require.NoError(t, err) - // Initialize the bundle config mutator.DefaultMutators(ctx, b) - // Create changes map using key-value syntax changes := map[string]deployplan.Changes{ "resources.jobs.test_job": { "tasks[task_key='main_task'].timeout_seconds": &deployplan.ChangeDesc{ @@ -174,12 +155,10 @@ func TestGenerateYAMLFiles_ArrayKeyValueAccess(t *testing.T) { }, } - // Generate YAML files fileChanges, err := GenerateYAMLFiles(ctx, b, changes) require.NoError(t, err) require.Len(t, fileChanges, 1) - // Parse YAML to verify the correct task was updated var result map[string]any err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) require.NoError(t, err) @@ -189,12 +168,10 @@ func TestGenerateYAMLFiles_ArrayKeyValueAccess(t *testing.T) { testJob := jobs["test_job"].(map[string]any) tasks := testJob["tasks"].([]any) - // Verify setup_task (index 0) is unchanged task0 := tasks[0].(map[string]any) assert.Equal(t, "setup_task", task0["task_key"]) assert.Equal(t, 600, task0["timeout_seconds"]) - // Verify main_task (index 1) is updated task1 := 
tasks[1].(map[string]any) assert.Equal(t, "main_task", task1["task_key"]) assert.Equal(t, 3600, task1["timeout_seconds"]) @@ -203,10 +180,8 @@ func TestGenerateYAMLFiles_ArrayKeyValueAccess(t *testing.T) { func TestGenerateYAMLFiles_MultipleResourcesSameFile(t *testing.T) { ctx := logdiag.InitContext(context.Background()) - // Create a temporary directory for the bundle tmpDir := t.TempDir() - // Create databricks.yml with multiple jobs yamlContent := `resources: jobs: job1: @@ -221,14 +196,11 @@ func TestGenerateYAMLFiles_MultipleResourcesSameFile(t *testing.T) { err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) require.NoError(t, err) - // Load the bundle (pass directory, not file) b, err := bundle.Load(ctx, tmpDir) require.NoError(t, err) - // Initialize the bundle config mutator.DefaultMutators(ctx, b) - // Create changes for both jobs changes := map[string]deployplan.Changes{ "resources.jobs.job1": { "timeout_seconds": &deployplan.ChangeDesc{ @@ -246,19 +218,15 @@ func TestGenerateYAMLFiles_MultipleResourcesSameFile(t *testing.T) { }, } - // Generate YAML files fileChanges, err := GenerateYAMLFiles(ctx, b, changes) require.NoError(t, err) - // Should only have one FileChange since both resources are in the same file require.Len(t, fileChanges, 1) assert.Equal(t, yamlPath, fileChanges[0].Path) - // Verify both changes are applied assert.Contains(t, fileChanges[0].ModifiedContent, "job1") assert.Contains(t, fileChanges[0].ModifiedContent, "job2") - // Parse and verify both jobs are updated var result map[string]any err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) require.NoError(t, err) @@ -276,10 +244,8 @@ func TestGenerateYAMLFiles_MultipleResourcesSameFile(t *testing.T) { func TestGenerateYAMLFiles_ResourceNotFound(t *testing.T) { ctx := logdiag.InitContext(context.Background()) - // Create a temporary directory for the bundle tmpDir := t.TempDir() - // Create a simple databricks.yml yamlContent := `resources: jobs: existing_job: @@ -290,14 +256,11 @@ func TestGenerateYAMLFiles_ResourceNotFound(t *testing.T) { err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) require.NoError(t, err) - // Load the bundle (pass directory, not file) b, err := bundle.Load(ctx, tmpDir) require.NoError(t, err) - // Initialize the bundle config mutator.DefaultMutators(ctx, b) - // Create changes for a non-existent resource changes := map[string]deployplan.Changes{ "resources.jobs.nonexistent_job": { "timeout_seconds": &deployplan.ChangeDesc{ @@ -307,21 +270,17 @@ func TestGenerateYAMLFiles_ResourceNotFound(t *testing.T) { }, } - // Generate YAML files - should not error, just skip the missing resource fileChanges, err := GenerateYAMLFiles(ctx, b, changes) require.NoError(t, err) - // Should return empty list since the resource was not found assert.Len(t, fileChanges, 0) } func TestGenerateYAMLFiles_InvalidFieldPath(t *testing.T) { ctx := logdiag.InitContext(context.Background()) - // Create a temporary directory for the bundle tmpDir := t.TempDir() - // Create a simple databricks.yml yamlContent := `resources: jobs: test_job: @@ -333,14 +292,11 @@ func TestGenerateYAMLFiles_InvalidFieldPath(t *testing.T) { err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) require.NoError(t, err) - // Load the bundle (pass directory, not file) b, err := bundle.Load(ctx, tmpDir) require.NoError(t, err) - // Initialize the bundle config mutator.DefaultMutators(ctx, b) - // Create changes with invalid field path syntax changes := map[string]deployplan.Changes{ 
"resources.jobs.test_job": { "invalid[[[path": &deployplan.ChangeDesc{ @@ -350,16 +306,12 @@ func TestGenerateYAMLFiles_InvalidFieldPath(t *testing.T) { }, } - // Generate YAML files - should handle gracefully fileChanges, err := GenerateYAMLFiles(ctx, b, changes) require.NoError(t, err) - // Should still return a FileChange, but the invalid field should be skipped - // The timeout_seconds value should remain unchanged if len(fileChanges) > 0 { assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") - // Parse and verify structure is maintained var result map[string]any err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) require.NoError(t, err) @@ -374,10 +326,8 @@ func TestGenerateYAMLFiles_InvalidFieldPath(t *testing.T) { func TestGenerateYAMLFiles_Include(t *testing.T) { ctx := logdiag.InitContext(context.Background()) - // Create a temporary directory for the bundle tmpDir := t.TempDir() - // Create main databricks.yml with bundle config and includes mainYAML := `bundle: name: test-bundle @@ -389,12 +339,10 @@ include: err := os.WriteFile(mainPath, []byte(mainYAML), 0o644) require.NoError(t, err) - // Create targets subdirectory targetsDir := filepath.Join(tmpDir, "targets") err = os.MkdirAll(targetsDir, 0o755) require.NoError(t, err) - // Create included file with dev_job resource devYAML := `resources: jobs: dev_job: @@ -406,14 +354,11 @@ include: err = os.WriteFile(devPath, []byte(devYAML), 0o644) require.NoError(t, err) - // Load the bundle b, err := bundle.Load(ctx, tmpDir) require.NoError(t, err) - // Process includes and other default mutators mutator.DefaultMutators(ctx, b) - // Create changes for the dev_job (which was defined in included file) changes := map[string]deployplan.Changes{ "resources.jobs.dev_job": { "timeout_seconds": &deployplan.ChangeDesc{ @@ -424,12 +369,10 @@ include: }, } - // Generate YAML files fileChanges, err := GenerateYAMLFiles(ctx, b, changes) require.NoError(t, err) require.Len(t, fileChanges, 1) - // Verify changes are written to targets/dev.yml (where resource was defined) assert.Equal(t, devPath, fileChanges[0].Path) assert.Contains(t, fileChanges[0].OriginalContent, "timeout_seconds: 1800") assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") From 02be4c14e8d108b93b679c23e76e2844031ee4ac Mon Sep 17 00:00:00 2001 From: Ilya Kuznetsov Date: Thu, 15 Jan 2026 13:33:16 +0100 Subject: [PATCH 09/10] Fix missing tags issue --- bundle/configsync/path.go | 42 ++++ bundle/configsync/path_test.go | 233 +++++++++++++++++++++++ bundle/configsync/yaml_generator.go | 14 +- bundle/configsync/yaml_generator_test.go | 29 +++ 4 files changed, 317 insertions(+), 1 deletion(-) create mode 100644 bundle/configsync/path.go create mode 100644 bundle/configsync/path_test.go diff --git a/bundle/configsync/path.go b/bundle/configsync/path.go new file mode 100644 index 0000000000..343a1a5ad8 --- /dev/null +++ b/bundle/configsync/path.go @@ -0,0 +1,42 @@ +package configsync + +import ( + "fmt" + + "github.com/databricks/cli/libs/dyn" +) + +// ensurePathExists ensures all intermediate nodes exist in the path. +// It creates empty maps for missing intermediate map keys. +// For sequence indices, it verifies they exist but does not create them. +// Returns the modified value with all intermediate nodes guaranteed to exist. 
+func ensurePathExists(v dyn.Value, path dyn.Path) (dyn.Value, error) { + if len(path) == 0 { + return v, nil + } + + result := v + for i := 1; i < len(path); i++ { + prefixPath := path[:i] + component := path[i-1] + + item, _ := dyn.GetByPath(result, prefixPath) + if !item.IsValid() { + if component.Key() != "" { + if i < len(path) && path[i].Key() == "" { + return dyn.InvalidValue, fmt.Errorf("sequence index does not exist at path %s", prefixPath) + } + + var err error + result, err = dyn.SetByPath(result, prefixPath, dyn.V(dyn.NewMapping())) + if err != nil { + return dyn.InvalidValue, fmt.Errorf("failed to create intermediate path %s: %w", prefixPath, err) + } + } else { + return dyn.InvalidValue, fmt.Errorf("sequence index does not exist at path %s", prefixPath) + } + } + } + + return result, nil +} diff --git a/bundle/configsync/path_test.go b/bundle/configsync/path_test.go new file mode 100644 index 0000000000..faa29188a5 --- /dev/null +++ b/bundle/configsync/path_test.go @@ -0,0 +1,233 @@ +package configsync + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEnsurePathExists(t *testing.T) { + t.Run("empty path returns original value", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + }) + + result, err := ensurePathExists(v, dyn.Path{}) + require.NoError(t, err) + assert.Equal(t, v, result) + }) + + t.Run("single-level path on existing map", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{ + "existing": dyn.V("value"), + }) + + path := dyn.Path{dyn.Key("new")} + result, err := ensurePathExists(v, path) + require.NoError(t, err) + + // Original key should still exist + existing, err := dyn.GetByPath(result, dyn.Path{dyn.Key("existing")}) + require.NoError(t, err) + assert.Equal(t, "value", existing.MustString()) + }) + + t.Run("multi-level nested path creates all intermediate nodes", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{}) + + path := dyn.Path{ + dyn.Key("level1"), + dyn.Key("level2"), + dyn.Key("level3"), + } + + result, err := ensurePathExists(v, path) + require.NoError(t, err) + + // Check that all intermediate nodes exist + level1, err := dyn.GetByPath(result, dyn.Path{dyn.Key("level1")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindMap, level1.Kind()) + + level2, err := dyn.GetByPath(result, dyn.Path{dyn.Key("level1"), dyn.Key("level2")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindMap, level2.Kind()) + }) + + t.Run("partially existing path creates only missing nodes", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{ + "resources": dyn.V(map[string]dyn.Value{ + "existing": dyn.V("value"), + }), + }) + + path := dyn.Path{ + dyn.Key("resources"), + dyn.Key("jobs"), + dyn.Key("my_job"), + } + + result, err := ensurePathExists(v, path) + require.NoError(t, err) + + // Check that existing data is preserved + existing, err := dyn.GetByPath(result, dyn.Path{dyn.Key("resources"), dyn.Key("existing")}) + require.NoError(t, err) + assert.Equal(t, "value", existing.MustString()) + + // Check that new intermediate node was created + jobs, err := dyn.GetByPath(result, dyn.Path{dyn.Key("resources"), dyn.Key("jobs")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindMap, jobs.Kind()) + }) + + t.Run("fully existing path is idempotent", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{ + "resources": dyn.V(map[string]dyn.Value{ + "jobs": dyn.V(map[string]dyn.Value{ + "my_job": 
dyn.V(map[string]dyn.Value{ + "name": dyn.V("test"), + }), + }), + }), + }) + + path := dyn.Path{ + dyn.Key("resources"), + dyn.Key("jobs"), + dyn.Key("my_job"), + } + + result, err := ensurePathExists(v, path) + require.NoError(t, err) + + // Check that existing nested data is preserved + name, err := dyn.GetByPath(result, dyn.Path{dyn.Key("resources"), dyn.Key("jobs"), dyn.Key("my_job"), dyn.Key("name")}) + require.NoError(t, err) + assert.Equal(t, "test", name.MustString()) + }) + + t.Run("can set value after ensuring path exists", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{}) + + path := dyn.Path{ + dyn.Key("resources"), + dyn.Key("jobs"), + dyn.Key("my_job"), + } + + // Ensure path exists + result, err := ensurePathExists(v, path) + require.NoError(t, err) + + // Now SetByPath should work without errors + finalValue := dyn.V(map[string]dyn.Value{ + "name": dyn.V("test_job"), + }) + + result, err = dyn.SetByPath(result, path, finalValue) + require.NoError(t, err) + + // Verify the value was set correctly + job, err := dyn.GetByPath(result, path) + require.NoError(t, err) + jobMap, ok := job.AsMap() + require.True(t, ok) + name, exists := jobMap.GetByString("name") + require.True(t, exists) + assert.Equal(t, "test_job", name.MustString()) + }) + + t.Run("handles deeply nested paths", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{}) + + path := dyn.Path{ + dyn.Key("a"), + dyn.Key("b"), + dyn.Key("c"), + dyn.Key("d"), + dyn.Key("e"), + } + + result, err := ensurePathExists(v, path) + require.NoError(t, err) + + // Verify all intermediate nodes exist + intermediate, err := dyn.GetByPath(result, dyn.Path{dyn.Key("a"), dyn.Key("b"), dyn.Key("c"), dyn.Key("d")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindMap, intermediate.Kind()) + }) + + t.Run("handles path with existing sequence", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{ + "tasks": dyn.V([]dyn.Value{ + dyn.V(map[string]dyn.Value{ + "name": dyn.V("task1"), + }), + }), + }) + + path := dyn.Path{ + dyn.Key("tasks"), + dyn.Index(0), + dyn.Key("timeout"), + } + + result, err := ensurePathExists(v, path) + require.NoError(t, err) + + // Original sequence should still exist + tasks, err := dyn.GetByPath(result, dyn.Path{dyn.Key("tasks")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindSequence, tasks.Kind()) + }) + + t.Run("fails when sequence index does not exist", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{}) + + path := dyn.Path{ + dyn.Key("tasks"), + dyn.Index(0), + dyn.Key("timeout"), + } + + _, err := ensurePathExists(v, path) + assert.Error(t, err) + assert.Contains(t, err.Error(), "sequence index does not exist") + }) + + t.Run("creates intermediate maps before sequence", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{}) + + // First ensure the path up to the sequence exists + pathToSeq := dyn.Path{ + dyn.Key("resources"), + dyn.Key("jobs"), + } + + result, err := ensurePathExists(v, pathToSeq) + require.NoError(t, err) + + // Manually add a sequence + result, err = dyn.SetByPath(result, pathToSeq, dyn.V([]dyn.Value{ + dyn.V(map[string]dyn.Value{"name": dyn.V("job1")}), + })) + require.NoError(t, err) + + fullPath := dyn.Path{ + dyn.Key("resources"), + dyn.Key("jobs"), + dyn.Index(0), + dyn.Key("tasks"), + } + + result, err = ensurePathExists(result, fullPath) + require.NoError(t, err) + + job, err := dyn.GetByPath(result, dyn.Path{dyn.Key("resources"), dyn.Key("jobs"), dyn.Index(0)}) + require.NoError(t, err) + assert.Equal(t, dyn.KindMap, job.Kind()) + }) +} diff --git 
a/bundle/configsync/yaml_generator.go b/bundle/configsync/yaml_generator.go index 54980ac866..457d7497c7 100644 --- a/bundle/configsync/yaml_generator.go +++ b/bundle/configsync/yaml_generator.go @@ -144,13 +144,18 @@ func applyChanges(ctx context.Context, resource dyn.Value, changes deployplan.Ch continue } - // Convert remote value to dyn.Value, handling custom types like enums remoteValue, err := convert.FromTyped(changeDesc.Remote, dyn.NilValue) if err != nil { log.Warnf(ctx, "Failed to convert remote value at path %s: %v", fieldPath, err) continue } + result, err = ensurePathExists(result, dynPath) + if err != nil { + log.Warnf(ctx, "Failed to ensure path exists for field %s: %v", fieldPath, err) + continue + } + newResult, err := dyn.SetByPath(result, dynPath, remoteValue) if err != nil { log.Warnf(ctx, "Failed to set value at path %s: %v", fieldPath, err) @@ -271,6 +276,13 @@ func GenerateYAMLFiles(ctx context.Context, b *bundle.Bundle, changes map[string continue } + // Ensure all intermediate nodes exist before setting + fileValue, err = ensurePathExists(fileValue, resourcePath) + if err != nil { + log.Warnf(ctx, "Failed to ensure path exists for resource %s: %v", item.resourceKey, err) + continue + } + // Update the file's dyn.Value with modified resource fileValue, err = dyn.SetByPath(fileValue, resourcePath, modifiedResource) if err != nil { diff --git a/bundle/configsync/yaml_generator_test.go b/bundle/configsync/yaml_generator_test.go index 12278653ee..e9ede27be9 100644 --- a/bundle/configsync/yaml_generator_test.go +++ b/bundle/configsync/yaml_generator_test.go @@ -714,3 +714,32 @@ func TestApplyChangesWithStructValues(t *testing.T) { require.NoError(t, err) assert.Equal(t, int64(3), retriesVal.MustInt()) } + +func TestApplyChanges_CreatesIntermediateNodes(t *testing.T) { + ctx := context.Background() + + // Resource without tags field + resource := dyn.V(map[string]dyn.Value{ + "name": dyn.V("test_job"), + }) + + // Change that requires creating tags map + changes := deployplan.Changes{ + "tags['test']": &deployplan.ChangeDesc{ + Remote: "val", + }, + } + + result, err := applyChanges(ctx, resource, changes) + require.NoError(t, err) + + // Verify tags map was created + tags, err := dyn.GetByPath(result, dyn.Path{dyn.Key("tags")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindMap, tags.Kind()) + + // Verify test key was set + testVal, err := dyn.GetByPath(result, dyn.Path{dyn.Key("tags"), dyn.Key("test")}) + require.NoError(t, err) + assert.Equal(t, "val", testVal.MustString()) +} From ee2564da642f720ce7c2e941705c4e73b275f421 Mon Sep 17 00:00:00 2001 From: Ilya Kuznetsov Date: Thu, 15 Jan 2026 13:55:00 +0100 Subject: [PATCH 10/10] Fix sequences --- bundle/configsync/path.go | 29 +++++++++---- bundle/configsync/path_test.go | 74 ++++++++++++++++++++++++++-------- 2 files changed, 79 insertions(+), 24 deletions(-) diff --git a/bundle/configsync/path.go b/bundle/configsync/path.go index 343a1a5ad8..925a8fca2d 100644 --- a/bundle/configsync/path.go +++ b/bundle/configsync/path.go @@ -8,7 +8,7 @@ import ( // ensurePathExists ensures all intermediate nodes exist in the path. // It creates empty maps for missing intermediate map keys. -// For sequence indices, it verifies they exist but does not create them. +// For sequences, it creates empty sequences with empty map elements when needed. // Returns the modified value with all intermediate nodes guaranteed to exist. 
func ensurePathExists(v dyn.Value, path dyn.Path) (dyn.Value, error) { if len(path) == 0 { @@ -23,14 +23,27 @@ func ensurePathExists(v dyn.Value, path dyn.Path) (dyn.Value, error) { item, _ := dyn.GetByPath(result, prefixPath) if !item.IsValid() { if component.Key() != "" { - if i < len(path) && path[i].Key() == "" { - return dyn.InvalidValue, fmt.Errorf("sequence index does not exist at path %s", prefixPath) - } + key := path[i].Key() + isIndex := key == "" + isKey := key != "" - var err error - result, err = dyn.SetByPath(result, prefixPath, dyn.V(dyn.NewMapping())) - if err != nil { - return dyn.InvalidValue, fmt.Errorf("failed to create intermediate path %s: %w", prefixPath, err) + if i < len(path) && isIndex { + index := path[i].Index() + seq := make([]dyn.Value, index+1) + for j := range seq { + seq[j] = dyn.V(dyn.NewMapping()) + } + var err error + result, err = dyn.SetByPath(result, prefixPath, dyn.V(seq)) + if err != nil { + return dyn.InvalidValue, fmt.Errorf("failed to create sequence at path %s: %w", prefixPath, err) + } + } else if isKey { + var err error + result, err = dyn.SetByPath(result, prefixPath, dyn.V(dyn.NewMapping())) + if err != nil { + return dyn.InvalidValue, fmt.Errorf("failed to create intermediate path %s: %w", prefixPath, err) + } } } else { return dyn.InvalidValue, fmt.Errorf("sequence index does not exist at path %s", prefixPath) diff --git a/bundle/configsync/path_test.go b/bundle/configsync/path_test.go index faa29188a5..3c123f5453 100644 --- a/bundle/configsync/path_test.go +++ b/bundle/configsync/path_test.go @@ -28,7 +28,6 @@ func TestEnsurePathExists(t *testing.T) { result, err := ensurePathExists(v, path) require.NoError(t, err) - // Original key should still exist existing, err := dyn.GetByPath(result, dyn.Path{dyn.Key("existing")}) require.NoError(t, err) assert.Equal(t, "value", existing.MustString()) @@ -46,7 +45,6 @@ func TestEnsurePathExists(t *testing.T) { result, err := ensurePathExists(v, path) require.NoError(t, err) - // Check that all intermediate nodes exist level1, err := dyn.GetByPath(result, dyn.Path{dyn.Key("level1")}) require.NoError(t, err) assert.Equal(t, dyn.KindMap, level1.Kind()) @@ -72,12 +70,10 @@ func TestEnsurePathExists(t *testing.T) { result, err := ensurePathExists(v, path) require.NoError(t, err) - // Check that existing data is preserved existing, err := dyn.GetByPath(result, dyn.Path{dyn.Key("resources"), dyn.Key("existing")}) require.NoError(t, err) assert.Equal(t, "value", existing.MustString()) - // Check that new intermediate node was created jobs, err := dyn.GetByPath(result, dyn.Path{dyn.Key("resources"), dyn.Key("jobs")}) require.NoError(t, err) assert.Equal(t, dyn.KindMap, jobs.Kind()) @@ -103,7 +99,6 @@ func TestEnsurePathExists(t *testing.T) { result, err := ensurePathExists(v, path) require.NoError(t, err) - // Check that existing nested data is preserved name, err := dyn.GetByPath(result, dyn.Path{dyn.Key("resources"), dyn.Key("jobs"), dyn.Key("my_job"), dyn.Key("name")}) require.NoError(t, err) assert.Equal(t, "test", name.MustString()) @@ -118,11 +113,9 @@ func TestEnsurePathExists(t *testing.T) { dyn.Key("my_job"), } - // Ensure path exists result, err := ensurePathExists(v, path) require.NoError(t, err) - // Now SetByPath should work without errors finalValue := dyn.V(map[string]dyn.Value{ "name": dyn.V("test_job"), }) @@ -130,7 +123,6 @@ func TestEnsurePathExists(t *testing.T) { result, err = dyn.SetByPath(result, path, finalValue) require.NoError(t, err) - // Verify the value was set correctly job, 
err := dyn.GetByPath(result, path) require.NoError(t, err) jobMap, ok := job.AsMap() @@ -154,7 +146,6 @@ func TestEnsurePathExists(t *testing.T) { result, err := ensurePathExists(v, path) require.NoError(t, err) - // Verify all intermediate nodes exist intermediate, err := dyn.GetByPath(result, dyn.Path{dyn.Key("a"), dyn.Key("b"), dyn.Key("c"), dyn.Key("d")}) require.NoError(t, err) assert.Equal(t, dyn.KindMap, intermediate.Kind()) @@ -178,13 +169,12 @@ func TestEnsurePathExists(t *testing.T) { result, err := ensurePathExists(v, path) require.NoError(t, err) - // Original sequence should still exist tasks, err := dyn.GetByPath(result, dyn.Path{dyn.Key("tasks")}) require.NoError(t, err) assert.Equal(t, dyn.KindSequence, tasks.Kind()) }) - t.Run("fails when sequence index does not exist", func(t *testing.T) { + t.Run("creates sequence when index does not exist", func(t *testing.T) { v := dyn.V(map[string]dyn.Value{}) path := dyn.Path{ @@ -193,15 +183,22 @@ func TestEnsurePathExists(t *testing.T) { dyn.Key("timeout"), } - _, err := ensurePathExists(v, path) - assert.Error(t, err) - assert.Contains(t, err.Error(), "sequence index does not exist") + result, err := ensurePathExists(v, path) + require.NoError(t, err) + + tasks, err := dyn.GetByPath(result, dyn.Path{dyn.Key("tasks")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindSequence, tasks.Kind()) + + seq, _ := tasks.AsSequence() + assert.Len(t, seq, 1) + + assert.Equal(t, dyn.KindMap, seq[0].Kind()) }) t.Run("creates intermediate maps before sequence", func(t *testing.T) { v := dyn.V(map[string]dyn.Value{}) - // First ensure the path up to the sequence exists pathToSeq := dyn.Path{ dyn.Key("resources"), dyn.Key("jobs"), @@ -210,7 +207,6 @@ func TestEnsurePathExists(t *testing.T) { result, err := ensurePathExists(v, pathToSeq) require.NoError(t, err) - // Manually add a sequence result, err = dyn.SetByPath(result, pathToSeq, dyn.V([]dyn.Value{ dyn.V(map[string]dyn.Value{"name": dyn.V("job1")}), })) @@ -230,4 +226,50 @@ func TestEnsurePathExists(t *testing.T) { require.NoError(t, err) assert.Equal(t, dyn.KindMap, job.Kind()) }) + + t.Run("creates sequence with multiple elements", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{}) + + path := dyn.Path{ + dyn.Key("items"), + dyn.Index(5), + dyn.Key("value"), + } + + result, err := ensurePathExists(v, path) + require.NoError(t, err) + + items, err := dyn.GetByPath(result, dyn.Path{dyn.Key("items")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindSequence, items.Kind()) + + seq, _ := items.AsSequence() + assert.Len(t, seq, 6) + + for i, elem := range seq { + assert.Equal(t, dyn.KindMap, elem.Kind(), "element %d should be a map", i) + } + }) + + t.Run("handles nested paths within created sequence elements", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{}) + + path := dyn.Path{ + dyn.Key("jobs"), + dyn.Index(0), + dyn.Key("tasks"), + dyn.Key("main"), + } + + result, err := ensurePathExists(v, path) + require.NoError(t, err) + + tasks, err := dyn.GetByPath(result, dyn.Path{ + dyn.Key("jobs"), + dyn.Index(0), + dyn.Key("tasks"), + }) + require.NoError(t, err) + assert.Equal(t, dyn.KindMap, tasks.Kind()) + }) }
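
Editor's note: the series adds ensurePathExists but carries no standalone usage note. As a hedged illustration only — the test name and its placement in a _test.go file under bundle/configsync are assumptions of this note, not part of the patches — the sketch below shows the "ensure, then set" flow that applyChanges relies on after patch 10: the field path runs through a tasks sequence that is absent from the source value, so the helper first materializes a one-element sequence of empty maps, after which dyn.SetByPath succeeds. Only dyn APIs already used in the patches above appear here.

package configsync

import (
	"testing"

	"github.com/databricks/cli/libs/dyn"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// TestEnsureThenSet_Sketch is an illustrative sketch (hypothetical name, not
// part of the patch series): ensure intermediate nodes exist, then set a value
// through them.
func TestEnsureThenSet_Sketch(t *testing.T) {
	// A resource whose on-disk configuration has no tasks at all.
	resource := dyn.V(map[string]dyn.Value{
		"name": dyn.V("test_job"),
	})

	// Remote state reports a value at tasks[0].task_key.
	path := dyn.Path{
		dyn.Key("tasks"),
		dyn.Index(0),
		dyn.Key("task_key"),
	}

	// Creates resource["tasks"] as a one-element sequence of empty maps,
	// so the index below resolves.
	result, err := ensurePathExists(resource, path)
	require.NoError(t, err)

	// Setting through the now-existing intermediate nodes no longer fails.
	result, err = dyn.SetByPath(result, path, dyn.V("main_task"))
	require.NoError(t, err)

	got, err := dyn.GetByPath(result, path)
	require.NoError(t, err)
	assert.Equal(t, "main_task", got.MustString())
}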