diff --git a/bundle/configsync/diff.go b/bundle/configsync/diff.go new file mode 100644 index 0000000000..de80b5a12b --- /dev/null +++ b/bundle/configsync/diff.go @@ -0,0 +1,46 @@ +package configsync + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deployplan" + "github.com/databricks/cli/bundle/direct" + "github.com/databricks/cli/libs/log" +) + +// DetectChanges compares current remote state with the last deployed state +// and returns a map of resource changes. +func DetectChanges(ctx context.Context, b *bundle.Bundle) (map[string]deployplan.Changes, error) { + changes := make(map[string]deployplan.Changes) + + deployBundle := &direct.DeploymentBundle{} + // TODO: for Terraform engine we should read the state file, converted to direct state format, it should be created during deployment + _, statePath := b.StateFilenameDirect(ctx) + + plan, err := deployBundle.CalculatePlan(ctx, b.WorkspaceClient(), &b.Config, statePath) + if err != nil { + return nil, fmt.Errorf("failed to calculate plan: %w", err) + } + + for resourceKey, entry := range plan.Plan { + resourceChanges := make(deployplan.Changes) + + if entry.Changes != nil { + for path, changeDesc := range entry.Changes { + if changeDesc.Remote != nil && changeDesc.Action != deployplan.Skip { + resourceChanges[path] = changeDesc + } + } + } + + if len(resourceChanges) != 0 { + changes[resourceKey] = resourceChanges + } + + log.Debugf(ctx, "Resource %s has %d changes", resourceKey, len(resourceChanges)) + } + + return changes, nil +} diff --git a/bundle/configsync/format.go b/bundle/configsync/format.go new file mode 100644 index 0000000000..e4416e0c78 --- /dev/null +++ b/bundle/configsync/format.go @@ -0,0 +1,30 @@ +package configsync + +import ( + "fmt" + "strings" + + "github.com/databricks/cli/bundle/deployplan" +) + +// FormatTextOutput formats the config changes as human-readable text. 
Useful for debugging +func FormatTextOutput(changes map[string]deployplan.Changes) string { + var output strings.Builder + + if len(changes) == 0 { + output.WriteString("No changes detected.\n") + return output.String() + } + + output.WriteString(fmt.Sprintf("Detected changes in %d resource(s):\n\n", len(changes))) + + for resourceKey, resourceChanges := range changes { + output.WriteString(fmt.Sprintf("Resource: %s\n", resourceKey)) + + for path, changeDesc := range resourceChanges { + output.WriteString(fmt.Sprintf(" %s: %s\n", path, changeDesc.Action)) + } + } + + return output.String() +} diff --git a/bundle/configsync/output.go b/bundle/configsync/output.go new file mode 100644 index 0000000000..fad2fd7636 --- /dev/null +++ b/bundle/configsync/output.go @@ -0,0 +1,39 @@ +package configsync + +import ( + "context" + "os" + "path/filepath" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deployplan" +) + +// FileChange represents a change to a bundle configuration file +type FileChange struct { + Path string `json:"path"` + OriginalContent string `json:"originalContent"` + ModifiedContent string `json:"modifiedContent"` +} + +// DiffOutput represents the complete output of the config-remote-sync command +type DiffOutput struct { + Files []FileChange `json:"files"` + Changes map[string]deployplan.Changes `json:"changes"` +} + +// SaveFiles writes all file changes to disk. 
+func SaveFiles(ctx context.Context, b *bundle.Bundle, files []FileChange) error { + for _, file := range files { + err := os.MkdirAll(filepath.Dir(file.Path), 0o755) + if err != nil { + return err + } + + err = os.WriteFile(file.Path, []byte(file.ModifiedContent), 0o644) + if err != nil { + return err + } + } + return nil +} diff --git a/bundle/configsync/output_test.go b/bundle/configsync/output_test.go new file mode 100644 index 0000000000..1b35b807d8 --- /dev/null +++ b/bundle/configsync/output_test.go @@ -0,0 +1,89 @@ +package configsync + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSaveFiles_Success(t *testing.T) { + ctx := context.Background() + + tmpDir := t.TempDir() + + yamlPath := filepath.Join(tmpDir, "subdir", "databricks.yml") + modifiedContent := `resources: + jobs: + test_job: + name: "Updated Job" + timeout_seconds: 7200 +` + + files := []FileChange{ + { + Path: yamlPath, + OriginalContent: "original content", + ModifiedContent: modifiedContent, + }, + } + + err := SaveFiles(ctx, &bundle.Bundle{}, files) + require.NoError(t, err) + + _, err = os.Stat(yamlPath) + require.NoError(t, err) + + content, err := os.ReadFile(yamlPath) + require.NoError(t, err) + assert.Equal(t, modifiedContent, string(content)) + + _, err = os.Stat(filepath.Dir(yamlPath)) + require.NoError(t, err) +} + +func TestSaveFiles_MultipleFiles(t *testing.T) { + ctx := context.Background() + + tmpDir := t.TempDir() + + file1Path := filepath.Join(tmpDir, "file1.yml") + file2Path := filepath.Join(tmpDir, "subdir", "file2.yml") + content1 := "content for file 1" + content2 := "content for file 2" + + files := []FileChange{ + { + Path: file1Path, + OriginalContent: "original 1", + ModifiedContent: content1, + }, + { + Path: file2Path, + OriginalContent: "original 2", + ModifiedContent: content2, + }, + } + + err := SaveFiles(ctx, 
&bundle.Bundle{}, files) + require.NoError(t, err) + + content, err := os.ReadFile(file1Path) + require.NoError(t, err) + assert.Equal(t, content1, string(content)) + + content, err = os.ReadFile(file2Path) + require.NoError(t, err) + assert.Equal(t, content2, string(content)) +} + +func TestSaveFiles_EmptyList(t *testing.T) { + ctx := context.Background() + + err := SaveFiles(ctx, &bundle.Bundle{}, []FileChange{}) + require.NoError(t, err) +} diff --git a/bundle/configsync/path.go b/bundle/configsync/path.go new file mode 100644 index 0000000000..925a8fca2d --- /dev/null +++ b/bundle/configsync/path.go @@ -0,0 +1,55 @@ +package configsync + +import ( + "fmt" + + "github.com/databricks/cli/libs/dyn" +) + +// ensurePathExists ensures all intermediate nodes exist in the path. +// It creates empty maps for missing intermediate map keys. +// For sequences, it creates empty sequences with empty map elements when needed. +// Returns the modified value with all intermediate nodes guaranteed to exist. 
+func ensurePathExists(v dyn.Value, path dyn.Path) (dyn.Value, error) { + if len(path) == 0 { + return v, nil + } + + result := v + for i := 1; i < len(path); i++ { + prefixPath := path[:i] + component := path[i-1] + + item, _ := dyn.GetByPath(result, prefixPath) + if !item.IsValid() { + if component.Key() != "" { + key := path[i].Key() + isIndex := key == "" + isKey := key != "" + + if i < len(path) && isIndex { + index := path[i].Index() + seq := make([]dyn.Value, index+1) + for j := range seq { + seq[j] = dyn.V(dyn.NewMapping()) + } + var err error + result, err = dyn.SetByPath(result, prefixPath, dyn.V(seq)) + if err != nil { + return dyn.InvalidValue, fmt.Errorf("failed to create sequence at path %s: %w", prefixPath, err) + } + } else if isKey { + var err error + result, err = dyn.SetByPath(result, prefixPath, dyn.V(dyn.NewMapping())) + if err != nil { + return dyn.InvalidValue, fmt.Errorf("failed to create intermediate path %s: %w", prefixPath, err) + } + } + } else { + return dyn.InvalidValue, fmt.Errorf("sequence index does not exist at path %s", prefixPath) + } + } + } + + return result, nil +} diff --git a/bundle/configsync/path_test.go b/bundle/configsync/path_test.go new file mode 100644 index 0000000000..3c123f5453 --- /dev/null +++ b/bundle/configsync/path_test.go @@ -0,0 +1,275 @@ +package configsync + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEnsurePathExists(t *testing.T) { + t.Run("empty path returns original value", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + }) + + result, err := ensurePathExists(v, dyn.Path{}) + require.NoError(t, err) + assert.Equal(t, v, result) + }) + + t.Run("single-level path on existing map", func(t *testing.T) { + v := dyn.V(map[string]dyn.Value{ + "existing": dyn.V("value"), + }) + + path := dyn.Path{dyn.Key("new")} + result, err := ensurePathExists(v, path) + 
		require.NoError(t, err)

		// The pre-existing sibling key must be untouched.
		existing, err := dyn.GetByPath(result, dyn.Path{dyn.Key("existing")})
		require.NoError(t, err)
		assert.Equal(t, "value", existing.MustString())
	})

	t.Run("multi-level nested path creates all intermediate nodes", func(t *testing.T) {
		v := dyn.V(map[string]dyn.Value{})

		path := dyn.Path{
			dyn.Key("level1"),
			dyn.Key("level2"),
			dyn.Key("level3"),
		}

		result, err := ensurePathExists(v, path)
		require.NoError(t, err)

		// Each intermediate node should be an empty map (the leaf itself is
		// created later by SetByPath, not by ensurePathExists).
		level1, err := dyn.GetByPath(result, dyn.Path{dyn.Key("level1")})
		require.NoError(t, err)
		assert.Equal(t, dyn.KindMap, level1.Kind())

		level2, err := dyn.GetByPath(result, dyn.Path{dyn.Key("level1"), dyn.Key("level2")})
		require.NoError(t, err)
		assert.Equal(t, dyn.KindMap, level2.Kind())
	})

	t.Run("partially existing path creates only missing nodes", func(t *testing.T) {
		v := dyn.V(map[string]dyn.Value{
			"resources": dyn.V(map[string]dyn.Value{
				"existing": dyn.V("value"),
			}),
		})

		path := dyn.Path{
			dyn.Key("resources"),
			dyn.Key("jobs"),
			dyn.Key("my_job"),
		}

		result, err := ensurePathExists(v, path)
		require.NoError(t, err)

		existing, err := dyn.GetByPath(result, dyn.Path{dyn.Key("resources"), dyn.Key("existing")})
		require.NoError(t, err)
		assert.Equal(t, "value", existing.MustString())

		jobs, err := dyn.GetByPath(result, dyn.Path{dyn.Key("resources"), dyn.Key("jobs")})
		require.NoError(t, err)
		assert.Equal(t, dyn.KindMap, jobs.Kind())
	})

	t.Run("fully existing path is idempotent", func(t *testing.T) {
		v := dyn.V(map[string]dyn.Value{
			"resources": dyn.V(map[string]dyn.Value{
				"jobs": dyn.V(map[string]dyn.Value{
					"my_job": dyn.V(map[string]dyn.Value{
						"name": dyn.V("test"),
					}),
				}),
			}),
		})

		path := dyn.Path{
			dyn.Key("resources"),
			dyn.Key("jobs"),
			dyn.Key("my_job"),
		}

		result, err := ensurePathExists(v, path)
		require.NoError(t, err)

		// Existing leaf values must survive an ensure over an existing path.
		name, err := dyn.GetByPath(result, dyn.Path{dyn.Key("resources"), dyn.Key("jobs"), dyn.Key("my_job"), dyn.Key("name")})
		require.NoError(t, err)
		assert.Equal(t, "test", name.MustString())
	})

	t.Run("can set value after ensuring path exists", func(t *testing.T) {
		v := dyn.V(map[string]dyn.Value{})

		path := dyn.Path{
			dyn.Key("resources"),
			dyn.Key("jobs"),
			dyn.Key("my_job"),
		}

		result, err := ensurePathExists(v, path)
		require.NoError(t, err)

		finalValue := dyn.V(map[string]dyn.Value{
			"name": dyn.V("test_job"),
		})

		result, err = dyn.SetByPath(result, path, finalValue)
		require.NoError(t, err)

		job, err := dyn.GetByPath(result, path)
		require.NoError(t, err)
		jobMap, ok := job.AsMap()
		require.True(t, ok)
		name, exists := jobMap.GetByString("name")
		require.True(t, exists)
		assert.Equal(t, "test_job", name.MustString())
	})

	t.Run("handles deeply nested paths", func(t *testing.T) {
		v := dyn.V(map[string]dyn.Value{})

		path := dyn.Path{
			dyn.Key("a"),
			dyn.Key("b"),
			dyn.Key("c"),
			dyn.Key("d"),
			dyn.Key("e"),
		}

		result, err := ensurePathExists(v, path)
		require.NoError(t, err)

		intermediate, err := dyn.GetByPath(result, dyn.Path{dyn.Key("a"), dyn.Key("b"), dyn.Key("c"), dyn.Key("d")})
		require.NoError(t, err)
		assert.Equal(t, dyn.KindMap, intermediate.Kind())
	})

	t.Run("handles path with existing sequence", func(t *testing.T) {
		v := dyn.V(map[string]dyn.Value{
			"tasks": dyn.V([]dyn.Value{
				dyn.V(map[string]dyn.Value{
					"name": dyn.V("task1"),
				}),
			}),
		})

		path := dyn.Path{
			dyn.Key("tasks"),
			dyn.Index(0),
			dyn.Key("timeout"),
		}

		result, err := ensurePathExists(v, path)
		require.NoError(t, err)

		tasks, err := dyn.GetByPath(result, dyn.Path{dyn.Key("tasks")})
		require.NoError(t, err)
		assert.Equal(t, dyn.KindSequence, tasks.Kind())
	})

	t.Run("creates sequence when index does not exist", func(t *testing.T) {
		v := dyn.V(map[string]dyn.Value{})

		path := dyn.Path{
			dyn.Key("tasks"),
			dyn.Index(0),
			dyn.Key("timeout"),
		}

		result, err := ensurePathExists(v, path)
		require.NoError(t, err)

		// A fresh sequence is created, padded with empty map elements.
		tasks, err := dyn.GetByPath(result, dyn.Path{dyn.Key("tasks")})
		require.NoError(t, err)
		assert.Equal(t, dyn.KindSequence, tasks.Kind())

		seq, _ := tasks.AsSequence()
		assert.Len(t, seq, 1)

		assert.Equal(t, dyn.KindMap, seq[0].Kind())
	})

	t.Run("creates intermediate maps before sequence", func(t *testing.T) {
		v := dyn.V(map[string]dyn.Value{})

		pathToSeq := dyn.Path{
			dyn.Key("resources"),
			dyn.Key("jobs"),
		}

		result, err := ensurePathExists(v, pathToSeq)
		require.NoError(t, err)

		result, err = dyn.SetByPath(result, pathToSeq, dyn.V([]dyn.Value{
			dyn.V(map[string]dyn.Value{"name": dyn.V("job1")}),
		}))
		require.NoError(t, err)

		fullPath := dyn.Path{
			dyn.Key("resources"),
			dyn.Key("jobs"),
			dyn.Index(0),
			dyn.Key("tasks"),
		}

		result, err = ensurePathExists(result, fullPath)
		require.NoError(t, err)

		job, err := dyn.GetByPath(result, dyn.Path{dyn.Key("resources"), dyn.Key("jobs"), dyn.Index(0)})
		require.NoError(t, err)
		assert.Equal(t, dyn.KindMap, job.Kind())
	})

	t.Run("creates sequence with multiple elements", func(t *testing.T) {
		v := dyn.V(map[string]dyn.Value{})

		path := dyn.Path{
			dyn.Key("items"),
			dyn.Index(5),
			dyn.Key("value"),
		}

		result, err := ensurePathExists(v, path)
		require.NoError(t, err)

		// Index 5 forces a sequence of length 6, all padded with empty maps.
		items, err := dyn.GetByPath(result, dyn.Path{dyn.Key("items")})
		require.NoError(t, err)
		assert.Equal(t, dyn.KindSequence, items.Kind())

		seq, _ := items.AsSequence()
		assert.Len(t, seq, 6)

		for i, elem := range seq {
			assert.Equal(t, dyn.KindMap, elem.Kind(), "element %d should be a map", i)
		}
	})

	t.Run("handles nested paths within created sequence elements", func(t *testing.T) {
		v := dyn.V(map[string]dyn.Value{})

		path := dyn.Path{
			dyn.Key("jobs"),
			dyn.Index(0),
			dyn.Key("tasks"),
			dyn.Key("main"),
		}

		result, err := ensurePathExists(v, path)
		require.NoError(t, err)

		tasks, err :=
dyn.GetByPath(result, dyn.Path{ + dyn.Key("jobs"), + dyn.Index(0), + dyn.Key("tasks"), + }) + require.NoError(t, err) + assert.Equal(t, dyn.KindMap, tasks.Kind()) + }) +} diff --git a/bundle/configsync/yaml_generator.go b/bundle/configsync/yaml_generator.go new file mode 100644 index 0000000000..457d7497c7 --- /dev/null +++ b/bundle/configsync/yaml_generator.go @@ -0,0 +1,309 @@ +package configsync + +import ( + "bytes" + "context" + "errors" + "fmt" + "os" + "strings" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deployplan" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/convert" + "github.com/databricks/cli/libs/dyn/yamlloader" + "github.com/databricks/cli/libs/log" + "github.com/databricks/cli/libs/structs/structpath" + "gopkg.in/yaml.v3" +) + +// resourceKeyToDynPath converts a resource key to a dyn.Path +// Example: "resources.jobs.my_job" -> Path{Key("resources"), Key("jobs"), Key("my_job")} +func resourceKeyToDynPath(resourceKey string) (dyn.Path, error) { + if resourceKey == "" { + return nil, errors.New("invalid resource key: empty string") + } + + parts := strings.Split(resourceKey, ".") + if len(parts) == 0 { + return nil, fmt.Errorf("invalid resource key: %s", resourceKey) + } + + path := make(dyn.Path, len(parts)) + for i, part := range parts { + path[i] = dyn.Key(part) + } + + return path, nil +} + +// getResourceWithLocation retrieves a resource dyn.Value and its file location +// Uses the dynamic config value, not typed structures +func getResourceWithLocation(configValue dyn.Value, resourceKey string) (dyn.Value, dyn.Location, error) { + path, err := resourceKeyToDynPath(resourceKey) + if err != nil { + return dyn.NilValue, dyn.Location{}, err + } + + resource, err := dyn.GetByPath(configValue, path) + if err != nil { + return dyn.NilValue, dyn.Location{}, fmt.Errorf("resource %s not found: %w", resourceKey, err) + } + + return resource, resource.Location(), nil +} + +// structpathToDynPath 
converts a structpath string to a dyn.Path +// Example: "tasks[0].timeout_seconds" -> Path{Key("tasks"), Index(0), Key("timeout_seconds")} +// Also supports "tasks[task_key='my_task']" syntax for array element selection by field value +func structpathToDynPath(_ context.Context, pathStr string, baseValue dyn.Value) (dyn.Path, error) { + node, err := structpath.Parse(pathStr) + if err != nil { + return nil, fmt.Errorf("failed to parse path %s: %w", pathStr, err) + } + + nodes := node.AsSlice() + + var dynPath dyn.Path + currentValue := baseValue + + for _, n := range nodes { + // Check for string key (field access) + if key, ok := n.StringKey(); ok { + dynPath = append(dynPath, dyn.Key(key)) + + // Update currentValue for next iteration + if currentValue.IsValid() { + currentValue, _ = dyn.GetByPath(currentValue, dyn.Path{dyn.Key(key)}) + } + continue + } + + // Check for numeric index + if idx, ok := n.Index(); ok { + dynPath = append(dynPath, dyn.Index(idx)) + + // Update currentValue for next iteration + if currentValue.IsValid() { + currentValue, _ = dyn.GetByPath(currentValue, dyn.Path{dyn.Index(idx)}) + } + continue + } + + // Check for key-value selector: [key='value'] + if key, value, ok := n.KeyValue(); ok { + // Need to search the array to find the matching index + if !currentValue.IsValid() || currentValue.Kind() != dyn.KindSequence { + return nil, fmt.Errorf("cannot apply [key='value'] selector to non-array value at path %s", dynPath.String()) + } + + seq, _ := currentValue.AsSequence() + foundIndex := -1 + + for i, elem := range seq { + keyValue, err := dyn.GetByPath(elem, dyn.Path{dyn.Key(key)}) + if err != nil { + continue + } + + // Compare the key value + if keyValue.Kind() == dyn.KindString && keyValue.MustString() == value { + foundIndex = i + break + } + } + + if foundIndex == -1 { + return nil, fmt.Errorf("no array element found with %s='%s' at path %s", key, value, dynPath.String()) + } + + dynPath = append(dynPath, dyn.Index(foundIndex)) + 
currentValue = seq[foundIndex] + continue + } + + // Skip wildcards or other special node types + if n.DotStar() || n.BracketStar() { + return nil, errors.New("wildcard patterns are not supported in field paths") + } + } + + return dynPath, nil +} + +// applyChanges applies all field changes to a resource dyn.Value +func applyChanges(ctx context.Context, resource dyn.Value, changes deployplan.Changes) (dyn.Value, error) { + result := resource + + for fieldPath, changeDesc := range changes { + // Convert structpath to dyn.Path + dynPath, err := structpathToDynPath(ctx, fieldPath, result) + if err != nil { + log.Warnf(ctx, "Failed to parse field path %s: %v", fieldPath, err) + continue + } + + remoteValue, err := convert.FromTyped(changeDesc.Remote, dyn.NilValue) + if err != nil { + log.Warnf(ctx, "Failed to convert remote value at path %s: %v", fieldPath, err) + continue + } + + result, err = ensurePathExists(result, dynPath) + if err != nil { + log.Warnf(ctx, "Failed to ensure path exists for field %s: %v", fieldPath, err) + continue + } + + newResult, err := dyn.SetByPath(result, dynPath, remoteValue) + if err != nil { + log.Warnf(ctx, "Failed to set value at path %s: %v", fieldPath, err) + continue + } + result = newResult + } + + return result, nil +} + +// dynValueToYAML converts a dyn.Value to a YAML string +func dynValueToYAML(v dyn.Value) (string, error) { + var buf bytes.Buffer + enc := yaml.NewEncoder(&buf) + enc.SetIndent(2) + + if err := enc.Encode(v.AsAny()); err != nil { + return "", err + } + + return buf.String(), nil +} + +// parseResourceKey extracts resource type and name from a resource key +// Example: "resources.jobs.my_job" -> type="jobs", name="my_job" +func parseResourceKey(resourceKey string) (resourceType, resourceName string, err error) { + parts := strings.Split(resourceKey, ".") + if len(parts) < 3 || parts[0] != "resources" { + return "", "", fmt.Errorf("invalid resource key format: %s (expected resources.TYPE.NAME)", resourceKey) + } 
+ + return parts[1], parts[2], nil +} + +// findResourceInFile searches for a resource within a loaded file's dyn.Value +func findResourceInFile(_ context.Context, fileValue dyn.Value, resourceType, resourceName, targetName string) (dyn.Value, dyn.Path, error) { + patternsToCheck := []dyn.Path{ + {dyn.Key("targets"), dyn.Key(targetName), dyn.Key("resources"), dyn.Key(resourceType), dyn.Key(resourceName)}, + {dyn.Key("resources"), dyn.Key(resourceType), dyn.Key(resourceName)}, + } + + for _, pattern := range patternsToCheck { + resource, err := dyn.GetByPath(fileValue, pattern) + if err == nil { + return resource, pattern, nil + } + } + + directPath := dyn.Path{dyn.Key("resources"), dyn.Key(resourceType), dyn.Key(resourceName)} + resource, err := dyn.GetByPath(fileValue, directPath) + if err == nil { + return resource, directPath, nil + } + + return dyn.NilValue, nil, fmt.Errorf("resource %s.%s not found in file", resourceType, resourceName) +} + +// GenerateYAMLFiles generates YAML files for the given changes. 
func GenerateYAMLFiles(ctx context.Context, b *bundle.Bundle, changes map[string]deployplan.Changes) ([]FileChange, error) {
	configValue := b.Config.Value()

	// Group the requested changes by the file each resource was defined in,
	// so each file is read, modified, and serialized exactly once.
	fileChanges := make(map[string][]struct {
		resourceKey string
		changes     deployplan.Changes
	})

	for resourceKey, resourceChanges := range changes {
		_, loc, err := getResourceWithLocation(configValue, resourceKey)
		if err != nil {
			log.Warnf(ctx, "Failed to find resource %s in bundle config: %v", resourceKey, err)
			continue
		}

		filePath := loc.File
		fileChanges[filePath] = append(fileChanges[filePath], struct {
			resourceKey string
			changes     deployplan.Changes
		}{resourceKey, resourceChanges})
	}

	var result []FileChange

	// Per-file and per-resource failures are logged and skipped (best effort)
	// rather than aborting the whole generation; the returned error is
	// currently always nil but kept in the signature for stricter modes.
	for filePath, resourcesInFile := range fileChanges {
		content, err := os.ReadFile(filePath)
		if err != nil {
			log.Warnf(ctx, "Failed to read file %s: %v", filePath, err)
			continue
		}

		// Load file as dyn.Value
		fileValue, err := yamlloader.LoadYAML(filePath, bytes.NewBuffer(content))
		if err != nil {
			log.Warnf(ctx, "Failed to parse YAML file %s: %v", filePath, err)
			continue
		}

		// Apply changes for each resource in this file
		for _, item := range resourcesInFile {
			// Parse resource key
			resourceType, resourceName, err := parseResourceKey(item.resourceKey)
			if err != nil {
				log.Warnf(ctx, "Failed to parse resource key %s: %v", item.resourceKey, err)
				continue
			}

			// Find resource in loaded file
			resource, resourcePath, err := findResourceInFile(ctx, fileValue, resourceType, resourceName, b.Config.Bundle.Target)
			if err != nil {
				log.Warnf(ctx, "Failed to find resource %s in file %s: %v", item.resourceKey, filePath, err)
				continue
			}

			// Apply changes to the resource
			modifiedResource, err := applyChanges(ctx, resource, item.changes)
			if err != nil {
				log.Warnf(ctx, "Failed to apply changes to resource %s: %v", item.resourceKey, err)
				continue
			}

			// Ensure all intermediate nodes exist before setting
			fileValue, err = ensurePathExists(fileValue, resourcePath)
			if err != nil {
				log.Warnf(ctx, "Failed to ensure path exists for resource %s: %v", item.resourceKey, err)
				continue
			}

			// Update the file's dyn.Value with modified resource
			fileValue, err = dyn.SetByPath(fileValue, resourcePath, modifiedResource)
			if err != nil {
				log.Warnf(ctx, "Failed to update file value for resource %s: %v", item.resourceKey, err)
				continue
			}
		}

		// Convert modified dyn.Value to YAML string
		modifiedContent, err := dynValueToYAML(fileValue)
		if err != nil {
			log.Warnf(ctx, "Failed to convert modified value to YAML for file %s: %v", filePath, err)
			continue
		}

		result = append(result, FileChange{
			Path:            filePath,
			OriginalContent: string(content),
			ModifiedContent: modifiedContent,
		})
	}

	return result, nil
}

// ---- bundle/configsync/yaml_generator_test.go ----

package configsync

import (
	"context"
	"os"
	"path/filepath"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/deployplan"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/logdiag"
	"github.com/databricks/databricks-sdk-go/service/jobs"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v3"
)

func TestGenerateYAMLFiles_SimpleFieldChange(t *testing.T) {
	ctx := logdiag.InitContext(context.Background())

	tmpDir := t.TempDir()

	yamlContent := `resources:
  jobs:
    test_job:
      name: "Test Job"
      timeout_seconds: 3600
      tasks:
        - task_key: "main_task"
          notebook_task:
            notebook_path: "/path/to/notebook"
`

	yamlPath := filepath.Join(tmpDir, "databricks.yml")
	err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644)
	require.NoError(t, err)

	b, err :=
bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + changes := map[string]deployplan.Changes{ + "resources.jobs.test_job": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 3600, + Remote: 7200, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + assert.Equal(t, yamlPath, fileChanges[0].Path) + assert.Contains(t, fileChanges[0].OriginalContent, "timeout_seconds: 3600") + assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 7200") + assert.NotContains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") +} + +func TestGenerateYAMLFiles_NestedFieldChange(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + tmpDir := t.TempDir() + + yamlContent := `resources: + jobs: + test_job: + name: "Test Job" + tasks: + - task_key: "main_task" + notebook_task: + notebook_path: "/path/to/notebook" + timeout_seconds: 1800 +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + changes := map[string]deployplan.Changes{ + "resources.jobs.test_job": { + "tasks[0].timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 1800, + Remote: 3600, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") + + var result map[string]any + err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) + require.NoError(t, err) + + resources := result["resources"].(map[string]any) + jobs := resources["jobs"].(map[string]any) + testJob := jobs["test_job"].(map[string]any) + tasks := testJob["tasks"].([]any) + task0 := 
tasks[0].(map[string]any) + + assert.Equal(t, 3600, task0["timeout_seconds"]) +} + +func TestGenerateYAMLFiles_ArrayKeyValueAccess(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + tmpDir := t.TempDir() + + yamlContent := `resources: + jobs: + test_job: + name: "Test Job" + tasks: + - task_key: "setup_task" + notebook_task: + notebook_path: "/setup" + timeout_seconds: 600 + - task_key: "main_task" + notebook_task: + notebook_path: "/main" + timeout_seconds: 1800 +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + changes := map[string]deployplan.Changes{ + "resources.jobs.test_job": { + "tasks[task_key='main_task'].timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 1800, + Remote: 3600, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + var result map[string]any + err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) + require.NoError(t, err) + + resources := result["resources"].(map[string]any) + jobs := resources["jobs"].(map[string]any) + testJob := jobs["test_job"].(map[string]any) + tasks := testJob["tasks"].([]any) + + task0 := tasks[0].(map[string]any) + assert.Equal(t, "setup_task", task0["task_key"]) + assert.Equal(t, 600, task0["timeout_seconds"]) + + task1 := tasks[1].(map[string]any) + assert.Equal(t, "main_task", task1["task_key"]) + assert.Equal(t, 3600, task1["timeout_seconds"]) +} + +func TestGenerateYAMLFiles_MultipleResourcesSameFile(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + tmpDir := t.TempDir() + + yamlContent := `resources: + jobs: + job1: + name: "Job 1" + timeout_seconds: 3600 + job2: + name: "Job 2" + timeout_seconds: 1800 +` + + yamlPath := filepath.Join(tmpDir, 
"databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + changes := map[string]deployplan.Changes{ + "resources.jobs.job1": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 3600, + Remote: 7200, + }, + }, + "resources.jobs.job2": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 1800, + Remote: 3600, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + + require.Len(t, fileChanges, 1) + assert.Equal(t, yamlPath, fileChanges[0].Path) + + assert.Contains(t, fileChanges[0].ModifiedContent, "job1") + assert.Contains(t, fileChanges[0].ModifiedContent, "job2") + + var result map[string]any + err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) + require.NoError(t, err) + + resources := result["resources"].(map[string]any) + jobs := resources["jobs"].(map[string]any) + + job1 := jobs["job1"].(map[string]any) + assert.Equal(t, 7200, job1["timeout_seconds"]) + + job2 := jobs["job2"].(map[string]any) + assert.Equal(t, 3600, job2["timeout_seconds"]) +} + +func TestGenerateYAMLFiles_ResourceNotFound(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + tmpDir := t.TempDir() + + yamlContent := `resources: + jobs: + existing_job: + name: "Existing Job" +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + changes := map[string]deployplan.Changes{ + "resources.jobs.nonexistent_job": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Remote: 3600, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + + assert.Len(t, 
fileChanges, 0) +} + +func TestGenerateYAMLFiles_InvalidFieldPath(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + tmpDir := t.TempDir() + + yamlContent := `resources: + jobs: + test_job: + name: "Test Job" + timeout_seconds: 3600 +` + + yamlPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + changes := map[string]deployplan.Changes{ + "resources.jobs.test_job": { + "invalid[[[path": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Remote: 7200, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + + if len(fileChanges) > 0 { + assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") + + var result map[string]any + err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) + require.NoError(t, err) + + resources := result["resources"].(map[string]any) + jobs := resources["jobs"].(map[string]any) + testJob := jobs["test_job"].(map[string]any) + assert.Equal(t, 3600, testJob["timeout_seconds"]) + } +} + +func TestGenerateYAMLFiles_Include(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + tmpDir := t.TempDir() + + mainYAML := `bundle: + name: test-bundle + +include: + - "targets/*.yml" +` + + mainPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(mainPath, []byte(mainYAML), 0o644) + require.NoError(t, err) + + targetsDir := filepath.Join(tmpDir, "targets") + err = os.MkdirAll(targetsDir, 0o755) + require.NoError(t, err) + + devYAML := `resources: + jobs: + dev_job: + name: "Dev Job" + timeout_seconds: 1800 +` + + devPath := filepath.Join(targetsDir, "dev.yml") + err = os.WriteFile(devPath, []byte(devYAML), 0o644) + require.NoError(t, err) + + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + 
changes := map[string]deployplan.Changes{ + "resources.jobs.dev_job": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 1800, + Remote: 3600, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + assert.Equal(t, devPath, fileChanges[0].Path) + assert.Contains(t, fileChanges[0].OriginalContent, "timeout_seconds: 1800") + assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") + assert.NotContains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 1800") +} + +func TestGenerateYAMLFiles_TargetOverride(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + tmpDir := t.TempDir() + + mainYAML := `bundle: + name: test-bundle + +targets: + dev: + resources: + jobs: + dev_job: + name: "Dev Job" + timeout_seconds: 1800 +` + + mainPath := filepath.Join(tmpDir, "databricks.yml") + err := os.WriteFile(mainPath, []byte(mainYAML), 0o644) + require.NoError(t, err) + + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + diags := bundle.Apply(ctx, b, mutator.SelectTarget("dev")) + require.NoError(t, diags.Error()) + + changes := map[string]deployplan.Changes{ + "resources.jobs.dev_job": { + "timeout_seconds": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Old: 1800, + Remote: 3600, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + assert.Equal(t, mainPath, fileChanges[0].Path) + assert.Contains(t, fileChanges[0].ModifiedContent, "timeout_seconds: 3600") +} + +func TestGenerateYAMLFiles_WithStructValues(t *testing.T) { + ctx := logdiag.InitContext(context.Background()) + + tmpDir := t.TempDir() + + yamlContent := `resources: + jobs: + test_job: + name: "Test Job" + timeout_seconds: 3600 + email_notifications: + on_success: + - old@example.com +` + + yamlPath := filepath.Join(tmpDir, 
"databricks.yml") + err := os.WriteFile(yamlPath, []byte(yamlContent), 0o644) + require.NoError(t, err) + + b, err := bundle.Load(ctx, tmpDir) + require.NoError(t, err) + + mutator.DefaultMutators(ctx, b) + + type EmailNotifications struct { + OnSuccess []string `json:"on_success,omitempty" yaml:"on_success,omitempty"` + OnFailure []string `json:"on_failure,omitempty" yaml:"on_failure,omitempty"` + } + + changes := map[string]deployplan.Changes{ + "resources.jobs.test_job": { + "email_notifications": &deployplan.ChangeDesc{ + Action: deployplan.Update, + Remote: &EmailNotifications{ + OnSuccess: []string{"success@example.com"}, + OnFailure: []string{"failure@example.com"}, + }, + }, + }, + } + + fileChanges, err := GenerateYAMLFiles(ctx, b, changes) + require.NoError(t, err) + require.Len(t, fileChanges, 1) + + assert.Equal(t, yamlPath, fileChanges[0].Path) + assert.Contains(t, fileChanges[0].OriginalContent, "on_success:") + assert.Contains(t, fileChanges[0].OriginalContent, "old@example.com") + assert.Contains(t, fileChanges[0].ModifiedContent, "success@example.com") + assert.Contains(t, fileChanges[0].ModifiedContent, "failure@example.com") + + type JobsConfig struct { + Name string `yaml:"name"` + TimeoutSeconds int `yaml:"timeout_seconds"` + EmailNotifications *EmailNotifications `yaml:"email_notifications,omitempty"` + } + + type ResourcesConfig struct { + Jobs map[string]JobsConfig `yaml:"jobs"` + } + + type RootConfig struct { + Resources ResourcesConfig `yaml:"resources"` + } + + var result RootConfig + err = yaml.Unmarshal([]byte(fileChanges[0].ModifiedContent), &result) + require.NoError(t, err) + + testJob := result.Resources.Jobs["test_job"] + assert.Equal(t, "Test Job", testJob.Name) + assert.Equal(t, 3600, testJob.TimeoutSeconds) + require.NotNil(t, testJob.EmailNotifications) + assert.Equal(t, []string{"success@example.com"}, testJob.EmailNotifications.OnSuccess) + assert.Equal(t, []string{"failure@example.com"}, 
testJob.EmailNotifications.OnFailure) +} + +func TestResourceKeyToDynPath(t *testing.T) { + tests := []struct { + name string + resourceKey string + wantErr bool + wantLen int + }{ + { + name: "simple resource key", + resourceKey: "resources.jobs.my_job", + wantErr: false, + wantLen: 3, + }, + { + name: "empty resource key", + resourceKey: "", + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + path, err := resourceKeyToDynPath(tt.resourceKey) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Len(t, path, tt.wantLen) + } + }) + } +} + +func TestParseResourceKey(t *testing.T) { + tests := []struct { + name string + resourceKey string + wantType string + wantName string + wantErr bool + }{ + { + name: "valid job resource", + resourceKey: "resources.jobs.my_job", + wantType: "jobs", + wantName: "my_job", + wantErr: false, + }, + { + name: "valid pipeline resource", + resourceKey: "resources.pipelines.my_pipeline", + wantType: "pipelines", + wantName: "my_pipeline", + wantErr: false, + }, + { + name: "invalid format - too few parts", + resourceKey: "resources.jobs", + wantErr: true, + }, + { + name: "invalid format - wrong prefix", + resourceKey: "targets.jobs.my_job", + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resourceType, resourceName, err := parseResourceKey(tt.resourceKey) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tt.wantType, resourceType) + assert.Equal(t, tt.wantName, resourceName) + } + }) + } +} + +func TestApplyChangesWithEnumTypes(t *testing.T) { + ctx := context.Background() + + resource := dyn.V(map[string]dyn.Value{ + "edit_mode": dyn.V("EDITABLE"), + "name": dyn.V("test_job"), + }) + + changes := deployplan.Changes{ + "edit_mode": &deployplan.ChangeDesc{ + Remote: jobs.JobEditModeUiLocked, + }, + } + + result, err := applyChanges(ctx, resource, changes) + 
require.NoError(t, err) + + editMode, err := dyn.GetByPath(result, dyn.Path{dyn.Key("edit_mode")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindString, editMode.Kind()) + assert.Equal(t, "UI_LOCKED", editMode.MustString()) +} + +func TestApplyChangesWithPrimitiveTypes(t *testing.T) { + ctx := context.Background() + + resource := dyn.V(map[string]dyn.Value{ + "name": dyn.V("old_name"), + "timeout": dyn.V(100), + "enabled": dyn.V(false), + "max_retries": dyn.V(1.5), + }) + + changes := deployplan.Changes{ + "name": &deployplan.ChangeDesc{ + Remote: "new_name", + }, + "timeout": &deployplan.ChangeDesc{ + Remote: int64(200), + }, + "enabled": &deployplan.ChangeDesc{ + Remote: true, + }, + "max_retries": &deployplan.ChangeDesc{ + Remote: 2.5, + }, + } + + result, err := applyChanges(ctx, resource, changes) + require.NoError(t, err) + + name, err := dyn.GetByPath(result, dyn.Path{dyn.Key("name")}) + require.NoError(t, err) + assert.Equal(t, "new_name", name.MustString()) + + timeout, err := dyn.GetByPath(result, dyn.Path{dyn.Key("timeout")}) + require.NoError(t, err) + assert.Equal(t, int64(200), timeout.MustInt()) + + enabled, err := dyn.GetByPath(result, dyn.Path{dyn.Key("enabled")}) + require.NoError(t, err) + assert.True(t, enabled.MustBool()) + + maxRetries, err := dyn.GetByPath(result, dyn.Path{dyn.Key("max_retries")}) + require.NoError(t, err) + assert.InDelta(t, 2.5, maxRetries.MustFloat(), 0.001) +} + +func TestApplyChangesWithNilValues(t *testing.T) { + ctx := context.Background() + + resource := dyn.V(map[string]dyn.Value{ + "name": dyn.V("test_job"), + "description": dyn.V("some description"), + }) + + changes := deployplan.Changes{ + "description": &deployplan.ChangeDesc{ + Remote: nil, + }, + } + + result, err := applyChanges(ctx, resource, changes) + require.NoError(t, err) + + description, err := dyn.GetByPath(result, dyn.Path{dyn.Key("description")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindNil, description.Kind()) +} + +func 
TestApplyChangesWithStructValues(t *testing.T) { + ctx := context.Background() + + resource := dyn.V(map[string]dyn.Value{ + "name": dyn.V("test_job"), + "settings": dyn.V(map[string]dyn.Value{ + "timeout": dyn.V(100), + }), + }) + + type Settings struct { + Timeout int64 `json:"timeout"` + MaxRetries *int64 `json:"max_retries,omitempty"` + } + + maxRetries := int64(3) + changes := deployplan.Changes{ + "settings": &deployplan.ChangeDesc{ + Remote: &Settings{ + Timeout: 200, + MaxRetries: &maxRetries, + }, + }, + } + + result, err := applyChanges(ctx, resource, changes) + require.NoError(t, err) + + settings, err := dyn.GetByPath(result, dyn.Path{dyn.Key("settings")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindMap, settings.Kind()) + + timeout, err := dyn.GetByPath(settings, dyn.Path{dyn.Key("timeout")}) + require.NoError(t, err) + assert.Equal(t, int64(200), timeout.MustInt()) + + retriesVal, err := dyn.GetByPath(settings, dyn.Path{dyn.Key("max_retries")}) + require.NoError(t, err) + assert.Equal(t, int64(3), retriesVal.MustInt()) +} + +func TestApplyChanges_CreatesIntermediateNodes(t *testing.T) { + ctx := context.Background() + + // Resource without tags field + resource := dyn.V(map[string]dyn.Value{ + "name": dyn.V("test_job"), + }) + + // Change that requires creating tags map + changes := deployplan.Changes{ + "tags['test']": &deployplan.ChangeDesc{ + Remote: "val", + }, + } + + result, err := applyChanges(ctx, resource, changes) + require.NoError(t, err) + + // Verify tags map was created + tags, err := dyn.GetByPath(result, dyn.Path{dyn.Key("tags")}) + require.NoError(t, err) + assert.Equal(t, dyn.KindMap, tags.Kind()) + + // Verify test key was set + testVal, err := dyn.GetByPath(result, dyn.Path{dyn.Key("tags"), dyn.Key("test")}) + require.NoError(t, err) + assert.Equal(t, "val", testVal.MustString()) +} diff --git a/cmd/bundle/debug.go b/cmd/bundle/debug.go index b912e14fe2..f0bd6c83ed 100644 --- a/cmd/bundle/debug.go +++ b/cmd/bundle/debug.go 
@@ -16,5 +16,6 @@ func newDebugCommand() *cobra.Command { cmd.AddCommand(debug.NewTerraformCommand()) cmd.AddCommand(debug.NewRefSchemaCommand()) cmd.AddCommand(debug.NewStatesCommand()) + cmd.AddCommand(debug.NewConfigRemoteSyncCommand()) return cmd } diff --git a/cmd/bundle/debug/config_remote_sync.go b/cmd/bundle/debug/config_remote_sync.go new file mode 100644 index 0000000000..b5e4bec503 --- /dev/null +++ b/cmd/bundle/debug/config_remote_sync.go @@ -0,0 +1,80 @@ +package debug + +import ( + "encoding/json" + "fmt" + + "github.com/databricks/cli/bundle/configsync" + "github.com/databricks/cli/cmd/bundle/utils" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/flags" + "github.com/spf13/cobra" +) + +func NewConfigRemoteSyncCommand() *cobra.Command { + var save bool + + cmd := &cobra.Command{ + Use: "config-remote-sync", + Short: "Sync remote resource changes to bundle configuration (experimental)", + Long: `Compares deployed state with current remote state and generates updated configuration files. + +When --save is specified, writes updated YAML files to disk. +Otherwise, outputs diff without modifying files. 
+
+Examples:
+  # Show diff without saving
+  databricks bundle debug config-remote-sync
+
+  # Show diff and save to files
+  databricks bundle debug config-remote-sync --save`,
+		Hidden: true, // Used by DABs in the Workspace only
+	}
+
+	cmd.Flags().BoolVar(&save, "save", false, "Write updated config files to disk")
+
+	cmd.RunE = func(cmd *cobra.Command, args []string) error {
+		b, _, err := utils.ProcessBundleRet(cmd, utils.ProcessOptions{})
+		if err != nil {
+			return err
+		}
+
+		ctx := cmd.Context()
+		changes, err := configsync.DetectChanges(ctx, b)
+		if err != nil {
+			return fmt.Errorf("failed to detect changes: %w", err)
+		}
+
+		files, err := configsync.GenerateYAMLFiles(ctx, b, changes)
+		if err != nil {
+			return fmt.Errorf("failed to generate YAML files: %w", err)
+		}
+
+		if save {
+			if err := configsync.SaveFiles(ctx, b, files); err != nil {
+				return fmt.Errorf("failed to save files: %w", err)
+			}
+		}
+
+		// NOTE(review): result stays nil when --output is neither json nor text, so the command prints only a bare newline — confirm root.OutputType is restricted to these two values, or make the text branch the default else.
+		var result []byte
+		if root.OutputType(cmd) == flags.OutputJSON {
+			diffOutput := &configsync.DiffOutput{
+				Files:   files,
+				Changes: changes,
+			}
+			result, err = json.MarshalIndent(diffOutput, "", "  ")
+			if err != nil {
+				return fmt.Errorf("failed to marshal output: %w", err)
+			}
+		} else if root.OutputType(cmd) == flags.OutputText {
+			result = []byte(configsync.FormatTextOutput(changes))
+		}
+
+		out := cmd.OutOrStdout()
+		_, _ = out.Write(result)
+		_, _ = out.Write([]byte{'\n'})
+		return nil
+	}
+
+	return cmd
+}