From e7cbac5cb96102a2b42b381f2e21862cd1cd24d9 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 16 Jan 2026 15:24:44 +0100 Subject: [PATCH 01/11] WIP: Plan for replacing FieldTriggers with declarative YAML config MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- PLAN.md | 296 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 296 insertions(+) create mode 100644 PLAN.md diff --git a/PLAN.md b/PLAN.md new file mode 100644 index 0000000000..4a4fc180fe --- /dev/null +++ b/PLAN.md @@ -0,0 +1,296 @@ +# Plan: Replace FieldTriggers with Declarative YAML Config + +## Overview + +Replace the programmatic `FieldTriggers()` method on each resource with a declarative YAML configuration file (`dresources/resources.yml`) that is embedded into the binary. + +## Current State + +Currently, each resource optionally implements: +```go +func (*ResourceVolume) FieldTriggers() map[string]deployplan.ActionType { + return map[string]deployplan.ActionType{ + "catalog_name": deployplan.Recreate, + "name": deployplan.UpdateWithID, + } +} +``` + +Resources with FieldTriggers: +- `volume.go`: catalog_name, schema_name, storage_location, volume_type → Recreate; name → UpdateWithID +- `schema.go`: name, catalog_name, storage_root → Recreate +- `pipeline.go`: storage, ingestion_definition.connection_name, ingestion_definition.ingestion_gateway_id → Recreate +- `model_serving_endpoint.go`: name, description, route_optimized, auto_capture configs → Recreate +- `secret_scope_acls.go`: scope_name → UpdateWithID +- `experiment.go`: name, artifact_location → Recreate +- `model.go`: name → Recreate +- `quality_monitor.go`: output_schema_name, assets_dir → Recreate +- `registered_model.go`: catalog_name, schema_name, name → Recreate +- `app.go`: name → Recreate +- `secret_scope.go`: name, backend_type → Recreate + +## Proposed YAML Schema + +```yaml +# dresources/resources.yml +resources: + jobs: + # Example: ignore server-set fields + ignore_remote_changes: + - effective_budget_policy_id + - "tags[key='server_tag']" + + volumes: + recreate_on_changes: + - catalog_name + - schema_name + - storage_location + - volume_type + update_id_on_changes: + - name + + pipelines: + recreate_on_changes: + - storage + - ingestion_definition.connection_name + - ingestion_definition.ingestion_gateway_id + + schemas: + recreate_on_changes: + - name + - catalog_name + - storage_root + + # Permissions and grants are top-level entries + permissions: + # config for all permissions resources + + grants: + # config for all grants resources +``` + +## Pattern Syntax + +Patterns use `libs/structs/structpath` syntax: + +- **Dot notation**: `foo.bar` - nested fields +- **Bracket notation**: `['field-name']` - fields with special characters +- **Array indices**: `[0]`, `[1]` - specific array elements +- **Wildcards**: `.*` (any field) and `[*]` (any array element) +- **Key-value matching**: `[key='value']` - match array/map elements by key + +Examples: +- `name` - simple field +- `config.auto_capture_config.catalog_name` - nested field +- `tags[key='server_tag']` - specific tag by key +- `tasks[*].task_key` - all task keys in tasks array + +## Implementation Steps + +### Step 1: Define Go Types for the Config + +Create `bundle/direct/dresources/config.go`: + +```go +package dresources + +import "github.com/databricks/cli/libs/structs/structpath" + +// ResourceLifecycleConfig defines lifecycle behavior for a resource type. +type ResourceLifecycleConfig struct { + // IgnoreRemoteChanges: fields where remote changes are ignored (output-only, policy-set) + IgnoreRemoteChanges []*structpath.PathNode `yaml:"ignore_remote_changes,omitempty"` + + // IgnoreLocalChanges: local config changes will be ignored (read-only resource) + IgnoreLocalChanges bool `yaml:"ignore_local_changes,omitempty"` + + // RecreateOnChanges: field changes trigger delete + create + RecreateOnChanges []*structpath.PathNode `yaml:"recreate_on_changes,omitempty"` + + // UpdateIDOnChanges: field changes trigger UpdateWithID + UpdateIDOnChanges []*structpath.PathNode `yaml:"update_id_on_changes,omitempty"` +} + +// Config is the root configuration structure. +type Config struct { + Resources map[string]ResourceLifecycleConfig `yaml:"resources"` +} +``` + +Note: `structpath.PathNode` implements `yaml.Marshaler` and `yaml.Unmarshaler` interfaces, +enabling automatic parsing during config load with early validation of pattern syntax. + +### Step 2: Create the YAML Config File + +Create `bundle/direct/dresources/resources.yml` with all current FieldTriggers data migrated to the new format. + +### Step 3: Embed and Load the Config + +Create `bundle/direct/dresources/config_loader.go`: + +```go +package dresources + +import ( + _ "embed" + "sync" + "gopkg.in/yaml.v3" +) + +//go:embed resources.yml +var resourcesYAML []byte + +var ( + configOnce sync.Once + globalConfig *Config + configErr error +) + +func LoadConfig() (*Config, error) { + configOnce.Do(func() { + globalConfig = &Config{} + configErr = yaml.Unmarshal(resourcesYAML, globalConfig) + }) + return globalConfig, configErr +} + +func GetResourceConfig(resourceType string) *ResourceLifecycleConfig { + cfg, err := LoadConfig() + if err != nil || cfg == nil { + return nil + } + if rc, ok := cfg.Resources[resourceType]; ok { + return &rc + } + return nil +} +``` + +### Step 4: Modify Adapter to Use Config Instead of FieldTriggers + +Update `adapter.go`: + +1. Add a field to store the resource config: + ```go + type Adapter struct { + // ... existing fields ... + resourceConfig *ResourceLifecycleConfig + } + ``` + +2. In `NewAdapter`, load from config instead of calling FieldTriggers(): + ```go + // Replace FieldTriggers loading with: + adapter.resourceConfig = GetResourceConfig(resourceType) + adapter.fieldTriggers = adapter.buildFieldTriggersFromConfig() + ``` + +3. Add method to convert config to fieldTriggers map: + ```go + func (a *Adapter) buildFieldTriggersFromConfig() map[string]deployplan.ActionType { + if a.resourceConfig == nil { + return map[string]deployplan.ActionType{} + } + result := make(map[string]deployplan.ActionType) + for _, field := range a.resourceConfig.RecreateOnChanges { + result[field] = deployplan.Recreate + } + for _, field := range a.resourceConfig.UpdateIDOnChanges { + result[field] = deployplan.UpdateWithID + } + return result + } + ``` + +### Step 5: Add Pattern Matching for `ignore_remote_changes` + +Add pattern matching function to `libs/structs/structpath/path.go`: + +```go +// MatchPattern checks if a path matches a pattern with wildcards. +// Pattern can contain: +// - .* to match any single field +// - [*] to match any array index +// - [key='value'] to match specific key-value pairs +func MatchPattern(path, pattern string) bool { + // Implementation using structpath.Parse for both path and pattern, + // then comparing node by node with wildcard support +} +``` + +### Step 6: Update `addPerFieldActions` for New Config Options + +Modify `bundle_plan.go:addPerFieldActions` to handle: +- `ignore_remote_changes`: Skip action if field matches any pattern (using MatchPattern) +- `ignore_local_changes`: Skip all local changes for the resource + +### Step 7: Remove FieldTriggers Methods from Resources + +Remove `FieldTriggers()` method from all resource files: +- volume.go +- schema.go +- pipeline.go +- model_serving_endpoint.go +- secret_scope_acls.go +- experiment.go +- model.go +- quality_monitor.go +- registered_model.go +- app.go +- secret_scope.go + +### Step 8: Update IResource Interface + +Remove FieldTriggers from the IResource interface in `adapter.go`. + +### Step 9: Update Tests + +1. Update `TestFieldTriggers` in `all_test.go` to validate config loading +2. Update `TestFieldTriggersNoUpdateWhenNotImplemented` to work with new config +3. Add tests for new config features (ignore_remote_changes, etc.) +4. Add tests for pattern matching in structpath + +### Step 10: Update Validation + +The adapter validation currently checks: +- `DoUpdateWithID` is implemented if FieldTriggers has `update_with_id` +- `DoUpdateWithID` has trigger if implemented + +Update this to validate against the YAML config instead. + +## File Changes Summary + +| File | Action | +|------|--------| +| `libs/structs/structpath/path.go` | Modify - Add MatchPattern function | +| `libs/structs/structpath/path_test.go` | Modify - Add MatchPattern tests | +| `dresources/config.go` | New - Go types for config | +| `dresources/resources.yml` | New - YAML config file | +| `dresources/adapter.go` | Modify - Remove FieldTriggers loading, add config-based approach | +| `dresources/volume.go` | Modify - Remove FieldTriggers() | +| `dresources/schema.go` | Modify - Remove FieldTriggers() | +| `dresources/pipeline.go` | Modify - Remove FieldTriggers() | +| `dresources/model_serving_endpoint.go` | Modify - Remove FieldTriggers() | +| `dresources/secret_scope_acls.go` | Modify - Remove FieldTriggers() | +| `dresources/experiment.go` | Modify - Remove FieldTriggers() | +| `dresources/model.go` | Modify - Remove FieldTriggers() | +| `dresources/quality_monitor.go` | Modify - Remove FieldTriggers() | +| `dresources/registered_model.go` | Modify - Remove FieldTriggers() | +| `dresources/app.go` | Modify - Remove FieldTriggers() | +| `dresources/secret_scope.go` | Modify - Remove FieldTriggers() | +| `dresources/all_test.go` | Modify - Update tests | +| `direct/bundle_plan.go` | Modify - Handle new config options | + +## Migration Strategy + +1. First implement config loading alongside existing FieldTriggers +2. Add new config features (ignore_remote_changes, etc.) +3. Migrate all existing FieldTriggers data to YAML +4. Remove FieldTriggers methods and update interface +5. Update tests + +## Design Decisions + +1. **Permissions and grants**: Top-level entries `permissions` and `grants` in the YAML config (not nested under parent resources) +2. **Pattern syntax**: Uses `libs/structs/structpath` syntax with wildcards (`.*`, `[*]`) and key-value matching (`[key='value']`) +3. **PathNode in config**: Config uses `[]*structpath.PathNode` directly instead of `[]string` for early validation and parse-once semantics From ddee8024e82b04e7902d79941143a1131c3d4ba8 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 16 Jan 2026 15:34:45 +0100 Subject: [PATCH 02/11] Add YAML marshalling support for structpath.PathNode MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Implement yaml.Marshaler and yaml.Unmarshaler interfaces for PathNode, enabling direct use in YAML configs with automatic parsing and validation. Tests cover: - Marshal/unmarshal of various path patterns - Error handling for invalid syntax - Roundtrip consistency - Usage in struct fields with []*PathNode - Null and empty string edge cases 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- libs/structs/structpath/path.go | 22 +++ libs/structs/structpath/path_test.go | 215 +++++++++++++++++++++++++++ 2 files changed, 237 insertions(+) diff --git a/libs/structs/structpath/path.go b/libs/structs/structpath/path.go index 58eb1cd273..d8a275b0c1 100644 --- a/libs/structs/structpath/path.go +++ b/libs/structs/structpath/path.go @@ -702,3 +702,25 @@ func HasPrefix(s, prefix string) bool { return true } + +// MarshalYAML implements yaml.Marshaler for PathNode. +func (p *PathNode) MarshalYAML() (any, error) { + return p.String(), nil +} + +// UnmarshalYAML implements yaml.Unmarshaler for PathNode. +func (p *PathNode) UnmarshalYAML(unmarshal func(any) error) error { + var s string + if err := unmarshal(&s); err != nil { + return err + } + parsed, err := Parse(s) + if err != nil { + return err + } + if parsed == nil { + return nil + } + *p = *parsed + return nil +} diff --git a/libs/structs/structpath/path_test.go b/libs/structs/structpath/path_test.go index 1cbd1a8538..b0aa9b424f 100644 --- a/libs/structs/structpath/path_test.go +++ b/libs/structs/structpath/path_test.go @@ -4,6 +4,8 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" ) func TestPathNode(t *testing.T) { @@ -751,3 +753,216 @@ func TestHasPrefix(t *testing.T) { }) } } + +func TestPathNodeYAMLMarshal(t *testing.T) { + tests := []struct { + name string + node *PathNode + expected string + }{ + { + name: "simple field", + node: NewDotString(nil, "name"), + expected: "name\n", + }, + { + name: "nested path", + node: NewDotString(NewDotString(nil, "config"), "database"), + expected: "config.database\n", + }, + { + name: "path with array index", + node: NewDotString(NewIndex(NewDotString(nil, "items"), 0), "name"), + expected: "items[0].name\n", + }, + { + name: "path with key-value", + node: NewDotString(NewKeyValue(NewDotString(nil, "tags"), "key", "value"), "name"), + expected: "tags[key='value'].name\n", + }, + { + name: "nil path", + node: nil, + expected: "null\n", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + data, err := yaml.Marshal(tt.node) + require.NoError(t, err) + assert.Equal(t, tt.expected, string(data)) + }) + } +} + +func TestPathNodeYAMLUnmarshal(t *testing.T) { + tests := []struct { + name string + input string + expected string + }{ + { + name: "simple field", + input: "name", + expected: "name", + }, + { + name: "nested path", + input: "config.database.host", + expected: "config.database.host", + }, + { + name: "path with array index", + input: "items[0].name", + expected: "items[0].name", + }, + { + name: "path with wildcard", + input: "tasks[*].name", + expected: "tasks[*].name", + }, + { + name: "path with key-value", + input: "tags[key='server']", + expected: "tags[key='server']", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var node PathNode + err := yaml.Unmarshal([]byte(tt.input), &node) + require.NoError(t, err) + assert.Equal(t, tt.expected, node.String()) + }) + } +} + +// TestPathNodeYAMLNullAndEmpty tests YAML null and empty string handling. +func TestPathNodeYAMLNullAndEmpty(t *testing.T) { + type Config struct { + Path *PathNode `yaml:"path"` + } + + // Null results in nil pointer (YAML doesn't call UnmarshalYAML for null) + var config Config + err := yaml.Unmarshal([]byte("path: null"), &config) + require.NoError(t, err) + assert.Nil(t, config.Path) + + // Empty string results in allocated pointer with zero-value PathNode. + // The zero value has index=0, which represents "[0]" (array index 0). + // This is a quirk - in practice, use null for "no path" in YAML configs. + var config2 Config + err = yaml.Unmarshal([]byte("path: ''"), &config2) + require.NoError(t, err) + require.NotNil(t, config2.Path) + assert.Equal(t, "[0]", config2.Path.String()) +} + +func TestPathNodeYAMLUnmarshalErrors(t *testing.T) { + tests := []struct { + name string + input string + error string + }{ + { + name: "unclosed bracket", + input: "field[0", + error: "unexpected end of input while parsing index", + }, + { + name: "invalid character", + input: "field..name", + error: "expected field name after '.' but got '.' at position 6", + }, + { + name: "unclosed quote", + input: "field['key", + error: "unexpected end of input while parsing map key", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var node PathNode + err := yaml.Unmarshal([]byte(tt.input), &node) + require.Error(t, err) + assert.Contains(t, err.Error(), tt.error) + }) + } +} + +// TestPathNodeYAMLRoundtrip tests that marshalling and unmarshalling preserves the path. +func TestPathNodeYAMLRoundtrip(t *testing.T) { + paths := []string{ + "name", + "config.database", + "items[0].name", + "tasks[*].settings", + "tags[key='env'].value", + "resources.jobs['my-job'].tasks[0]", + } + + for _, path := range paths { + t.Run(path, func(t *testing.T) { + // Parse -> Marshal -> Unmarshal -> compare + original, err := Parse(path) + require.NoError(t, err) + + data, err := yaml.Marshal(original) + require.NoError(t, err) + + var restored PathNode + err = yaml.Unmarshal(data, &restored) + require.NoError(t, err) + + assert.Equal(t, path, restored.String()) + }) + } +} + +// TestPathNodeYAMLInStruct tests PathNode as a field in a struct. +func TestPathNodeYAMLInStruct(t *testing.T) { + type Config struct { + Paths []*PathNode `yaml:"paths"` + } + + yamlInput := ` +paths: + - name + - config.database + - items[0].value + - tags[key='env'] +` + + var config Config + err := yaml.Unmarshal([]byte(yamlInput), &config) + require.NoError(t, err) + require.Len(t, config.Paths, 4) + + assert.Equal(t, "name", config.Paths[0].String()) + assert.Equal(t, "config.database", config.Paths[1].String()) + assert.Equal(t, "items[0].value", config.Paths[2].String()) + assert.Equal(t, "tags[key='env']", config.Paths[3].String()) +} + +// TestPathNodeYAMLInStructWithErrors tests that invalid paths in YAML cause errors. +func TestPathNodeYAMLInStructWithErrors(t *testing.T) { + type Config struct { + Paths []*PathNode `yaml:"paths"` + } + + yamlInput := ` +paths: + - name + - field[invalid + - config.database +` + + var config Config + err := yaml.Unmarshal([]byte(yamlInput), &config) + require.Error(t, err) + assert.Contains(t, err.Error(), "unexpected end of input") +} From 751ccdff2045f041aaf9332a3089e2ac3dd29f8d Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 16 Jan 2026 16:07:55 +0100 Subject: [PATCH 03/11] Replace FieldTriggers with declarative YAML config MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This removes the programmatic FieldTriggers() methods from all resources and replaces them with a declarative YAML configuration file (dresources/resources.yml) that is embedded into the binary. Changes: - Add ResourceLifecycleConfig struct with RecreateOnChanges, UpdateIDOnChanges, IgnoreRemoteChanges fields using *structpath.PathNode - Add resources.yml with all existing FieldTriggers data migrated - Update Adapter to accept resourceType and load config from YAML - Update bundle_plan.go to use config directly via getActionFromConfig() - Remove FieldTriggers() from IResource interface and all resource types - Replace ReasonFieldTriggers with ReasonResourceConfig - Update tests to validate against ResourceConfig 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- bundle/deployplan/plan.go | 2 +- bundle/direct/bundle_plan.go | 31 ++++++- bundle/direct/dresources/adapter.go | 61 +++----------- bundle/direct/dresources/all.go | 8 +- bundle/direct/dresources/all_test.go | 49 +++++------- bundle/direct/dresources/app.go | 7 -- bundle/direct/dresources/config.go | 63 +++++++++++++++ bundle/direct/dresources/config_test.go | 57 +++++++++++++ bundle/direct/dresources/experiment.go | 12 --- bundle/direct/dresources/model.go | 20 ----- .../dresources/model_serving_endpoint.go | 13 --- bundle/direct/dresources/pipeline.go | 10 --- bundle/direct/dresources/quality_monitor.go | 7 -- bundle/direct/dresources/registered_model.go | 13 --- bundle/direct/dresources/resources.yml | 80 +++++++++++++++++++ bundle/direct/dresources/schema.go | 9 --- bundle/direct/dresources/secret_scope.go | 10 --- bundle/direct/dresources/secret_scope_acls.go | 9 --- bundle/direct/dresources/volume.go | 11 --- 19 files changed, 265 insertions(+), 207 deletions(-) create mode 100644 bundle/direct/dresources/config.go create mode 100644 bundle/direct/dresources/config_test.go create mode 100644 bundle/direct/dresources/resources.yml diff --git a/bundle/deployplan/plan.go b/bundle/deployplan/plan.go index d72274b16f..0ec18da261 100644 --- a/bundle/deployplan/plan.go +++ b/bundle/deployplan/plan.go @@ -91,7 +91,7 @@ const ( ReasonServerSideDefault = "server_side_default" ReasonAlias = "alias" ReasonRemoteAlreadySet = "remote_already_set" - ReasonFieldTriggers = "field_triggers" + ReasonResourceConfig = "resource_config" ReasonConfigOnly = "config_only" ) diff --git a/bundle/direct/bundle_plan.go b/bundle/direct/bundle_plan.go index bd8cfa24c3..9deeca4889 100644 --- a/bundle/direct/bundle_plan.go +++ b/bundle/direct/bundle_plan.go @@ -360,7 +360,7 @@ func prepareChanges(ctx context.Context, adapter *dresources.Adapter, localDiff, } func addPerFieldActions(ctx context.Context, adapter *dresources.Adapter, changes deployplan.Changes, remoteState any) error { - fieldTriggers := adapter.FieldTriggers() + cfg := adapter.ResourceConfig() for pathString, ch := range changes { path, err := structpath.Parse(pathString) @@ -378,10 +378,9 @@ func addPerFieldActions(ctx context.Context, adapter *dresources.Adapter, change } else if structdiff.IsEqual(ch.Remote, ch.New) { ch.Action = deployplan.Skip ch.Reason = deployplan.ReasonRemoteAlreadySet - } else if action, ok := fieldTriggers[pathString]; ok { - // TODO: should we check prefixes instead? + } else if action := getActionFromConfig(cfg, pathString); action != deployplan.Undefined { ch.Action = action - ch.Reason = deployplan.ReasonFieldTriggers + ch.Reason = deployplan.ReasonResourceConfig } else { ch.Action = deployplan.Update } @@ -395,6 +394,30 @@ func addPerFieldActions(ctx context.Context, adapter *dresources.Adapter, change return nil } +// getActionFromConfig returns the action for a field path based on resource config. +// Returns Undefined if no config applies. +func getActionFromConfig(cfg *dresources.ResourceLifecycleConfig, pathString string) deployplan.ActionType { + if cfg == nil { + return deployplan.Undefined + } + for _, p := range cfg.RecreateOnChanges { + if structpath.HasPrefix(pathString, p.String()) { + return deployplan.Recreate + } + } + for _, p := range cfg.UpdateIDOnChanges { + if structpath.HasPrefix(pathString, p.String()) { + return deployplan.UpdateWithID + } + } + for _, p := range cfg.IgnoreRemoteChanges { + if structpath.HasPrefix(pathString, p.String()) { + return deployplan.Skip + } + } + return deployplan.Undefined +} + // TODO: calling this "Local" is not right, it can resolve "id" and remote refrences for "skip" targets func (b *DeploymentBundle) LookupReferenceLocal(ctx context.Context, path *structpath.PathNode) (any, error) { // TODO: Prefix(3) assumes resources.jobs.foo but not resources.jobs.foo.permissions diff --git a/bundle/direct/dresources/adapter.go b/bundle/direct/dresources/adapter.go index 3ddeefc37e..ead1e6cfbc 100644 --- a/bundle/direct/dresources/adapter.go +++ b/bundle/direct/dresources/adapter.go @@ -4,7 +4,6 @@ import ( "context" "errors" "fmt" - "maps" "reflect" "github.com/databricks/cli/bundle/deployplan" @@ -46,14 +45,6 @@ type IResource interface { // Example: func (r *ResourceJob) DoDelete(ctx context.Context, id string) error DoDelete(ctx context.Context, id string) error - // [Optional] FieldTriggers returns actions to trigger when given fields are changed. - // Keys are field paths (e.g., "name", "catalog_name"). Values are actions. - // Unspecified changed fields default to Update. - // - // Note: these functions are called once per resource implementation initialization, - // not once per resource. - FieldTriggers() map[string]deployplan.ActionType - // [Optional] OverrideChangeDesc can implement custom logic to update a given ChangeDesc; it is run last after built-in classifiers and field triggers. OverrideChangeDesc(ctx context.Context, path *structpath.PathNode, changedesc *ChangeDesc, remoteState any) error @@ -103,11 +94,11 @@ type Adapter struct { overrideChangeDesc *calladapt.BoundCaller doResize *calladapt.BoundCaller - fieldTriggers map[string]deployplan.ActionType - keyedSlices map[string]any + resourceConfig *ResourceLifecycleConfig + keyedSlices map[string]any } -func NewAdapter(typedNil any, client *databricks.WorkspaceClient) (*Adapter, error) { +func NewAdapter(typedNil any, resourceType string, client *databricks.WorkspaceClient) (*Adapter, error) { newCall, err := prepareCallRequired(typedNil, "New") if err != nil { return nil, err @@ -132,7 +123,7 @@ func NewAdapter(typedNil any, client *databricks.WorkspaceClient) (*Adapter, err waitAfterCreate: nil, waitAfterUpdate: nil, overrideChangeDesc: nil, - fieldTriggers: map[string]deployplan.ActionType{}, + resourceConfig: GetResourceConfig(resourceType), keyedSlices: nil, } @@ -141,19 +132,6 @@ func NewAdapter(typedNil any, client *databricks.WorkspaceClient) (*Adapter, err return nil, err } - // Load optional FieldTriggers method from the unified interface - triggerCall, err := calladapt.PrepareCall(impl, calladapt.TypeOf[IResource](), "FieldTriggers") - if err != nil { - return nil, err - } - if triggerCall != nil { - // Validate FieldTriggers signature: func(bool) map[string]deployplan.ActionType - adapter.fieldTriggers, err = loadFieldTriggers(triggerCall) - if err != nil { - return nil, err - } - } - err = adapter.validate() if err != nil { return nil, err @@ -162,18 +140,6 @@ func NewAdapter(typedNil any, client *databricks.WorkspaceClient) (*Adapter, err return adapter, nil } -// loadFieldTriggers calls FieldTriggers with isLocal parameter and returns the resulting map. -func loadFieldTriggers(triggerCall *calladapt.BoundCaller) (map[string]deployplan.ActionType, error) { - outs, err := triggerCall.Call() - if err != nil || len(outs) != 1 { - return nil, fmt.Errorf("failed to call FieldTriggers(): %w", err) - } - fields := outs[0].(map[string]deployplan.ActionType) - result := make(map[string]deployplan.ActionType, len(fields)) - maps.Copy(result, fields) - return result, nil -} - // loadKeyedSlices validates and calls KeyedSlices method, returning the resulting map. func loadKeyedSlices(call *calladapt.BoundCaller) (map[string]any, error) { outs, err := call.Call() @@ -359,19 +325,14 @@ func (a *Adapter) validate() error { return err } - // FieldTriggers validation + // Validate resourceConfig consistency with DoUpdateWithID if a.overrideChangeDesc == nil { - hasUpdateWithIDTrigger := false - for _, action := range a.fieldTriggers { - if action == deployplan.UpdateWithID { - hasUpdateWithIDTrigger = true - } - } + hasUpdateWithIDTrigger := a.resourceConfig != nil && len(a.resourceConfig.UpdateIDOnChanges) > 0 if hasUpdateWithIDTrigger && a.doUpdateWithID == nil { - return errors.New("FieldTriggers includes update_with_id but DoUpdateWithID is not implemented") + return errors.New("resourceConfig has update_id_on_changes but DoUpdateWithID is not implemented") } if a.doUpdateWithID != nil && !hasUpdateWithIDTrigger { - return errors.New("DoUpdateWithID is implemented but FieldTriggers lacks update_with_id trigger") + return errors.New("DoUpdateWithID is implemented but resourceConfig lacks update_id_on_changes") } } @@ -390,8 +351,8 @@ func (a *Adapter) RemoteType() reflect.Type { return a.doRefresh.OutTypes[0] } -func (a *Adapter) FieldTriggers() map[string]deployplan.ActionType { - return a.fieldTriggers +func (a *Adapter) ResourceConfig() *ResourceLifecycleConfig { + return a.resourceConfig } func (a *Adapter) PrepareState(input any) (any, error) { @@ -539,7 +500,7 @@ func (a *Adapter) WaitAfterUpdate(ctx context.Context, newState any) (any, error return remoteState, nil } -// ClassifyChange classifies a change using custom logic or FieldTriggers. +// OverrideChangeDesc allows custom logic to override change classification. func (a *Adapter) OverrideChangeDesc(ctx context.Context, path *structpath.PathNode, change *ChangeDesc, remoteState any) error { if a.overrideChangeDesc == nil { return nil diff --git a/bundle/direct/dresources/all.go b/bundle/direct/dresources/all.go index eb37628961..3c00f6b262 100644 --- a/bundle/direct/dresources/all.go +++ b/bundle/direct/dresources/all.go @@ -48,12 +48,12 @@ var SupportedResources = map[string]any{ func InitAll(client *databricks.WorkspaceClient) (map[string]*Adapter, error) { result := make(map[string]*Adapter) - for group, resource := range SupportedResources { - adapter, err := NewAdapter(resource, client) + for resourceType, resource := range SupportedResources { + adapter, err := NewAdapter(resource, resourceType, client) if err != nil { - return nil, fmt.Errorf("%s: %w", group, err) + return nil, fmt.Errorf("%s: %w", resourceType, err) } - result[group] = adapter + result[resourceType] = adapter } return result, nil } diff --git a/bundle/direct/dresources/all_test.go b/bundle/direct/dresources/all_test.go index d6f48c812b..9357b6fc81 100644 --- a/bundle/direct/dresources/all_test.go +++ b/bundle/direct/dresources/all_test.go @@ -496,13 +496,13 @@ var testDeps = map[string]prepareWorkspace{ func TestAll(t *testing.T) { _, client := setupTestServerClient(t) - for group, resource := range SupportedResources { - t.Run(group, func(t *testing.T) { - adapter, err := NewAdapter(resource, client) + for resourceType, resource := range SupportedResources { + t.Run(resourceType, func(t *testing.T) { + adapter, err := NewAdapter(resource, resourceType, client) require.NoError(t, err) require.NotNil(t, adapter) - testCRUD(t, group, adapter, client) + testCRUD(t, resourceType, adapter, client) }) } @@ -654,35 +654,30 @@ func validateFields(t *testing.T, configType reflect.Type, fields map[string]dep } } -// TestFieldTriggers validates that all trigger keys -// exist in the corresponding ConfigType for each resource. -func TestFieldTriggers(t *testing.T) { - for resourceName, resource := range SupportedResources { - adapter, err := NewAdapter(resource, nil) +// TestResourceConfig validates that all field patterns in resource config +// exist in the corresponding StateType for each resource. +func TestResourceConfig(t *testing.T) { + for resourceType, resource := range SupportedResources { + adapter, err := NewAdapter(resource, resourceType, nil) require.NoError(t, err) - t.Run(resourceName+"_local", func(t *testing.T) { - validateFields(t, adapter.StateType(), adapter.FieldTriggers()) - }) - } -} - -// TestFieldTriggersNoUpdateWhenNotImplemented validates that resources without -// DoUpdate implementation don't produce update actions in their FieldTriggers. -func TestFieldTriggersNoUpdateWhenNotImplemented(t *testing.T) { - for resourceName, resource := range SupportedResources { - adapter, err := NewAdapter(resource, nil) - require.NoError(t, err) - - if adapter.HasDoUpdate() { + cfg := adapter.ResourceConfig() + if cfg == nil { continue } - t.Run(resourceName+"_local", func(t *testing.T) { - for field, action := range adapter.FieldTriggers() { - assert.NotEqual(t, deployplan.Update, action, - "resource %s does not implement DoUpdate but field %s triggers update action", resourceName, field) + t.Run(resourceType, func(t *testing.T) { + fieldMap := make(map[string]deployplan.ActionType) + for _, p := range cfg.RecreateOnChanges { + fieldMap[p.String()] = deployplan.Recreate + } + for _, p := range cfg.UpdateIDOnChanges { + fieldMap[p.String()] = deployplan.UpdateWithID + } + for _, p := range cfg.IgnoreRemoteChanges { + fieldMap[p.String()] = deployplan.Skip } + validateFields(t, adapter.StateType(), fieldMap) }) } } diff --git a/bundle/direct/dresources/app.go b/bundle/direct/dresources/app.go index 1cb5d2cccf..756079bf34 100644 --- a/bundle/direct/dresources/app.go +++ b/bundle/direct/dresources/app.go @@ -7,7 +7,6 @@ import ( "time" "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/log" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" @@ -91,12 +90,6 @@ func (r *ResourceApp) DoDelete(ctx context.Context, id string) error { return err } -func (*ResourceApp) FieldTriggers() map[string]deployplan.ActionType { - return map[string]deployplan.ActionType{ - "name": deployplan.Recreate, - } -} - func (r *ResourceApp) WaitAfterCreate(ctx context.Context, config *apps.App) (*apps.App, error) { return r.waitForApp(ctx, r.client, config.Name) } diff --git a/bundle/direct/dresources/config.go b/bundle/direct/dresources/config.go new file mode 100644 index 0000000000..e825dcc458 --- /dev/null +++ b/bundle/direct/dresources/config.go @@ -0,0 +1,63 @@ +package dresources + +import ( + _ "embed" + "sync" + + "github.com/databricks/cli/libs/structs/structpath" + "gopkg.in/yaml.v3" +) + +// ResourceLifecycleConfig defines lifecycle behavior for a resource type. +type ResourceLifecycleConfig struct { + // IgnoreRemoteChanges: field patterns where remote changes are ignored (output-only, policy-set). + IgnoreRemoteChanges []*structpath.PathNode `yaml:"ignore_remote_changes,omitempty"` + + // IgnoreLocalChanges: if true, local config changes will be ignored (read-only resource). + IgnoreLocalChanges bool `yaml:"ignore_local_changes,omitempty"` + + // RecreateOnChanges: field patterns that trigger delete + create when changed. + RecreateOnChanges []*structpath.PathNode `yaml:"recreate_on_changes,omitempty"` + + // UpdateIDOnChanges: field patterns that trigger UpdateWithID when changed. + UpdateIDOnChanges []*structpath.PathNode `yaml:"update_id_on_changes,omitempty"` +} + +// Config is the root configuration structure for resource lifecycle behavior. +type Config struct { + Resources map[string]ResourceLifecycleConfig `yaml:"resources"` +} + +//go:embed resources.yml +var resourcesYAML []byte + +var ( + configOnce sync.Once + globalConfig *Config + configErr error +) + +// LoadConfig loads and parses the embedded resources.yml configuration. +// The config is loaded once and cached for subsequent calls. +func LoadConfig() (*Config, error) { + configOnce.Do(func() { + globalConfig = &Config{ + Resources: nil, + } + configErr = yaml.Unmarshal(resourcesYAML, globalConfig) + }) + return globalConfig, configErr +} + +// GetResourceConfig returns the lifecycle config for a given resource type. +// Returns nil if the resource type has no configuration. +func GetResourceConfig(resourceType string) *ResourceLifecycleConfig { + cfg, err := LoadConfig() + if err != nil || cfg == nil { + return nil + } + if rc, ok := cfg.Resources[resourceType]; ok { + return &rc + } + return nil +} diff --git a/bundle/direct/dresources/config_test.go b/bundle/direct/dresources/config_test.go new file mode 100644 index 0000000000..332bf68c35 --- /dev/null +++ b/bundle/direct/dresources/config_test.go @@ -0,0 +1,57 @@ +package dresources + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLoadConfig(t *testing.T) { + cfg, err := LoadConfig() + require.NoError(t, err) + require.NotNil(t, cfg) + require.NotNil(t, cfg.Resources) + + // Verify some known resource configurations + volumes := cfg.Resources["volumes"] + assert.Len(t, volumes.RecreateOnChanges, 4) + assert.Len(t, volumes.UpdateIDOnChanges, 1) + assert.Equal(t, "name", volumes.UpdateIDOnChanges[0].String()) + + schemas := cfg.Resources["schemas"] + assert.Len(t, schemas.RecreateOnChanges, 3) + + // Verify nested paths work + endpoints := cfg.Resources["model_serving_endpoints"] + found := false + for _, p := range endpoints.RecreateOnChanges { + if p.String() == "config.auto_capture_config.catalog_name" { + found = true + break + } + } + assert.True(t, found, "should find nested path config.auto_capture_config.catalog_name") +} + +func TestGetResourceConfig(t *testing.T) { + // Existing resource + cfg := GetResourceConfig("volumes") + require.NotNil(t, cfg) + assert.Len(t, cfg.RecreateOnChanges, 4) + + // Non-existing resource returns nil + cfg = GetResourceConfig("nonexistent") + assert.Nil(t, cfg) + + // Jobs have no config in resources.yml + cfg = GetResourceConfig("jobs") + assert.Nil(t, cfg) +} + +func TestConfigIgnoreRemoteChanges(t *testing.T) { + cfg := GetResourceConfig("experiments") + require.NotNil(t, cfg) + require.Len(t, cfg.IgnoreRemoteChanges, 1) + assert.Equal(t, "tags", cfg.IgnoreRemoteChanges[0].String()) +} diff --git a/bundle/direct/dresources/experiment.go b/bundle/direct/dresources/experiment.go index 0ee0edcd8c..2225fd7e8c 100644 --- a/bundle/direct/dresources/experiment.go +++ b/bundle/direct/dresources/experiment.go @@ -4,7 +4,6 @@ import ( "context" "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/utils" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/ml" @@ -71,14 +70,3 @@ func (r *ResourceExperiment) DoDelete(ctx context.Context, id string) error { ExperimentId: id, }) } - -func (*ResourceExperiment) FieldTriggers() map[string]deployplan.ActionType { - // TF implementation: https://github.com/databricks/terraform-provider-databricks/blob/6c106e8e7052bb2726148d66309fd460ed444236/mlflow/resource_mlflow_experiment.go#L22 - return map[string]deployplan.ActionType{ - "name": deployplan.Update, - "artifact_location": deployplan.Recreate, - - // Tags updates are not supported by TF. This mirrors that behaviour. - "tags": deployplan.Skip, - } -} diff --git a/bundle/direct/dresources/model.go b/bundle/direct/dresources/model.go index de6b057bd5..cb9a3c9b74 100644 --- a/bundle/direct/dresources/model.go +++ b/bundle/direct/dresources/model.go @@ -4,7 +4,6 @@ import ( "context" "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/utils" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/ml" @@ -107,22 +106,3 @@ func (r *ResourceMlflowModel) DoDelete(ctx context.Context, id string) error { Name: id, }) } - -func (*ResourceMlflowModel) FieldTriggers() map[string]deployplan.ActionType { - return map[string]deployplan.ActionType{ - // Recreate matches current behavior of Terraform. It is possible to rename without recreate - // but that would require dynamic select of the method during update since - // the [ml.RenameModel] needs to be called instead of [ml.UpdateModel]. - // - // We might reasonably choose to never fix this because this is a legacy resource. - "name": deployplan.Recreate, - - // Allowing updates for tags requires dynamic selection of the method since - // tags can only be updated by calling [ml.SetModelTag] or [ml.DeleteModelTag] methods. - // - // Skip annotation matches the current behavior of Terraform where tags changes are showed - // in plan but are just ignored / not applied. Since this is a legacy resource we might - // reasonably choose to not fix it here as well. - "tags": deployplan.Skip, - } -} diff --git a/bundle/direct/dresources/model_serving_endpoint.go b/bundle/direct/dresources/model_serving_endpoint.go index e8899dc678..27c98abaaa 100644 --- a/bundle/direct/dresources/model_serving_endpoint.go +++ b/bundle/direct/dresources/model_serving_endpoint.go @@ -6,7 +6,6 @@ import ( "time" "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/utils" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/serving" @@ -316,15 +315,3 @@ func (r *ResourceModelServingEndpoint) DoUpdate(ctx context.Context, id string, func (r *ResourceModelServingEndpoint) DoDelete(ctx context.Context, id string) error { return r.client.ServingEndpoints.DeleteByName(ctx, id) } - -func (*ResourceModelServingEndpoint) FieldTriggers() map[string]deployplan.ActionType { - // TF implementation: https://github.com/databricks/terraform-provider-databricks/blob/6c106e8e7052bb2726148d66309fd460ed444236/mlflow/resource_mlflow_experiment.go#L22 - return map[string]deployplan.ActionType{ - "name": deployplan.Recreate, - "description": deployplan.Recreate, // description is immutable, can't be updated via API - "config.auto_capture_config.catalog_name": deployplan.Recreate, - "config.auto_capture_config.schema_name": deployplan.Recreate, - "config.auto_capture_config.table_name_prefix": deployplan.Recreate, - "route_optimized": deployplan.Recreate, - } -} diff --git a/bundle/direct/dresources/pipeline.go b/bundle/direct/dresources/pipeline.go index 5ca6405b49..6e3f37853e 100644 --- a/bundle/direct/dresources/pipeline.go +++ b/bundle/direct/dresources/pipeline.go @@ -122,16 +122,6 @@ func (r *ResourcePipeline) DoDelete(ctx context.Context, id string) error { return r.client.Pipelines.DeleteByPipelineId(ctx, id) } -func (*ResourcePipeline) FieldTriggers() map[string]deployplan.ActionType { - result := map[string]deployplan.ActionType{ - "storage": deployplan.Recreate, - "ingestion_definition.connection_name": deployplan.Recreate, - "ingestion_definition.ingestion_gateway_id": deployplan.Recreate, - } - - return result -} - func (*ResourcePipeline) OverrideChangeDesc(ctx context.Context, path *structpath.PathNode, ch *ChangeDesc, _ *pipelines.GetPipelineResponse) error { if path.String() == "run_as" { if structdiff.IsEqual(ch.Old, ch.New) { diff --git a/bundle/direct/dresources/quality_monitor.go b/bundle/direct/dresources/quality_monitor.go index 28d57085b5..294c0ba598 100644 --- a/bundle/direct/dresources/quality_monitor.go +++ b/bundle/direct/dresources/quality_monitor.go @@ -116,13 +116,6 @@ func (r *ResourceQualityMonitor) DoDelete(ctx context.Context, id string) error return err } -func (*ResourceQualityMonitor) FieldTriggers() map[string]deployplan.ActionType { - return map[string]deployplan.ActionType{ - "assets_dir": deployplan.Recreate, - "table_name": deployplan.Recreate, - } -} - func (r *ResourceQualityMonitor) OverrideChangeDesc(_ context.Context, path *structpath.PathNode, change *ChangeDesc, _ *catalog.MonitorInfo) error { if path.String() == "warehouse_id" && change.Old == change.New { change.Action = deployplan.Skip diff --git a/bundle/direct/dresources/registered_model.go b/bundle/direct/dresources/registered_model.go index 458dafe873..bdfeb61fa7 100644 --- a/bundle/direct/dresources/registered_model.go +++ b/bundle/direct/dresources/registered_model.go @@ -4,7 +4,6 @@ import ( "context" "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/utils" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/catalog" @@ -102,15 +101,3 @@ func (r *ResourceRegisteredModel) DoDelete(ctx context.Context, id string) error FullName: id, }) } - -func (*ResourceRegisteredModel) FieldTriggers() map[string]deployplan.ActionType { - return map[string]deployplan.ActionType{ - // The name can technically be updated without recreated. We recreate for now though - // to match TF implementation. - "name": deployplan.Recreate, - - "catalog_name": deployplan.Recreate, - "schema_name": deployplan.Recreate, - "storage_location": deployplan.Recreate, - } -} diff --git a/bundle/direct/dresources/resources.yml b/bundle/direct/dresources/resources.yml new file mode 100644 index 0000000000..7e2456e17b --- /dev/null +++ b/bundle/direct/dresources/resources.yml @@ -0,0 +1,80 @@ +# Resource lifecycle configuration for Databricks Asset Bundles. +# This file defines how field changes affect resource operations. +# +# Available options: +# recreate_on_changes: fields that trigger delete + create +# update_id_on_changes: fields that trigger UpdateWithID (ID may change) +# ignore_remote_changes: fields where remote changes are ignored +# ignore_local_changes: if true, all local changes are ignored + +resources: + apps: + recreate_on_changes: + - name + + experiments: + recreate_on_changes: + - artifact_location + ignore_remote_changes: + - tags + + models: + recreate_on_changes: + - name + ignore_remote_changes: + - tags + + model_serving_endpoints: + recreate_on_changes: + - name + - description + - config.auto_capture_config.catalog_name + - config.auto_capture_config.schema_name + - config.auto_capture_config.table_name_prefix + - route_optimized + + pipelines: + recreate_on_changes: + - storage + - ingestion_definition.connection_name + - ingestion_definition.ingestion_gateway_id + + quality_monitors: + recreate_on_changes: + - assets_dir + - table_name + + registered_models: + recreate_on_changes: + - name + - catalog_name + - schema_name + - storage_location + + schemas: + recreate_on_changes: + - name + - catalog_name + - storage_root + + secret_scopes: + recreate_on_changes: + - scope + - scope_backend_type + - backend_azure_keyvault + - initial_manage_principal + + volumes: + recreate_on_changes: + - catalog_name + - schema_name + - storage_location + - volume_type + update_id_on_changes: + - name + + # Permissions for secret scopes use ResourceSecretScopeAcls + # which needs UpdateWithID when scope_name changes + secret_scopes.permissions: + update_id_on_changes: + - scope_name diff --git a/bundle/direct/dresources/schema.go b/bundle/direct/dresources/schema.go index 4ed682566f..e0e05ac0d9 100644 --- a/bundle/direct/dresources/schema.go +++ b/bundle/direct/dresources/schema.go @@ -4,7 +4,6 @@ import ( "context" "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/utils" "github.com/databricks/databricks-sdk-go" @@ -77,11 +76,3 @@ func (r *ResourceSchema) DoDelete(ctx context.Context, id string) error { ForceSendFields: nil, }) } - -func (*ResourceSchema) FieldTriggers() map[string]deployplan.ActionType { - return map[string]deployplan.ActionType{ - "name": deployplan.Recreate, - "catalog_name": deployplan.Recreate, - "storage_root": deployplan.Recreate, - } -} diff --git a/bundle/direct/dresources/secret_scope.go b/bundle/direct/dresources/secret_scope.go index 34c838b000..420424997a 100644 --- a/bundle/direct/dresources/secret_scope.go +++ b/bundle/direct/dresources/secret_scope.go @@ -5,7 +5,6 @@ import ( "fmt" "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/utils" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/workspace" @@ -82,12 +81,3 @@ func (r *ResourceSecretScope) DoCreate(ctx context.Context, state *SecretScopeCo func (r *ResourceSecretScope) DoDelete(ctx context.Context, id string) error { return r.client.Secrets.DeleteScopeByScope(ctx, id) } - -func (r *ResourceSecretScope) FieldTriggers() map[string]deployplan.ActionType { - return map[string]deployplan.ActionType{ - "scope": deployplan.Recreate, - "scope_backend_type": deployplan.Recreate, - "backend_azure_keyvault": deployplan.Recreate, - "initial_manage_principal": deployplan.Recreate, - } -} diff --git a/bundle/direct/dresources/secret_scope_acls.go b/bundle/direct/dresources/secret_scope_acls.go index 547de52f97..034f6c3952 100644 --- a/bundle/direct/dresources/secret_scope_acls.go +++ b/bundle/direct/dresources/secret_scope_acls.go @@ -7,7 +7,6 @@ import ( "strings" "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/structs/structvar" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" @@ -115,14 +114,6 @@ func (r *ResourceSecretScopeAcls) DoUpdate(ctx context.Context, id string, state return nil, err } -func (r *ResourceSecretScopeAcls) FieldTriggers() map[string]deployplan.ActionType { - // When scope name changes, we need a DoUpdateWithID trigger. This is necessary so that subsequent - // DoRead operations use the correct ID and we do not end up with a persistent drift. - return map[string]deployplan.ActionType{ - "scope_name": deployplan.UpdateWithID, - } -} - // Removing ACLs is a no-op, to match the behavior for permissions and grants. func (r *ResourceSecretScopeAcls) DoDelete(ctx context.Context, id string) error { return nil diff --git a/bundle/direct/dresources/volume.go b/bundle/direct/dresources/volume.go index 3912f07eb5..6d33a49e27 100644 --- a/bundle/direct/dresources/volume.go +++ b/bundle/direct/dresources/volume.go @@ -6,7 +6,6 @@ import ( "strings" "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/utils" "github.com/databricks/databricks-sdk-go" @@ -113,16 +112,6 @@ func (r *ResourceVolume) DoDelete(ctx context.Context, id string) error { return r.client.Volumes.DeleteByName(ctx, id) } -func (*ResourceVolume) FieldTriggers() map[string]deployplan.ActionType { - return map[string]deployplan.ActionType{ - "catalog_name": deployplan.Recreate, - "schema_name": deployplan.Recreate, - "storage_location": deployplan.Recreate, - "volume_type": deployplan.Recreate, - "name": deployplan.UpdateWithID, - } -} - func getNameFromID(id string) (string, error) { items := strings.Split(id, ".") if len(items) == 0 { From c293d2f95d13be1360ba8773b37800a1c4986b88 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 16 Jan 2026 16:20:00 +0100 Subject: [PATCH 04/11] replace field_triggers with builtin_rule --- .../pipelines_recreate/out.plan_update.direct.json | 2 +- .../model_serving_endpoints/basic/out.second-plan.direct.json | 2 +- .../recreate/catalog-name/out.second-plan.direct.json | 2 +- .../recreate/name-change/out.second-plan.direct.json | 2 +- .../recreate/route-optimized/out.second-plan.direct.json | 2 +- .../recreate/schema-name/out.second-plan.direct.json | 2 +- .../recreate/table-prefix/out.second-plan.direct.json | 2 +- .../change-ingestion-definition/out.plan_recreate.direct.json | 2 +- .../change-storage/out.plan_recreate.direct.json | 2 +- .../quality_monitors/change_assets_dir/out.plan.direct.json | 2 +- .../quality_monitors/change_table_name/out.plan.direct.json | 2 +- .../resources/secret_scopes/basic/out.plan2.direct.json | 4 ++-- .../bundle/resources/volumes/change-name/out.plan.direct.json | 2 +- bundle/deployplan/plan.go | 2 +- bundle/direct/bundle_plan.go | 2 +- 15 files changed, 16 insertions(+), 16 deletions(-) diff --git a/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_update.direct.json b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_update.direct.json index 219c213669..0077c82b27 100644 --- a/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_update.direct.json +++ b/acceptance/bundle/resource_deps/pipelines_recreate/out.plan_update.direct.json @@ -114,7 +114,7 @@ "changes": { "storage": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "dbfs:/my-storage", "new": "dbfs:/my-new-storage", "remote": "dbfs:/my-storage" diff --git a/acceptance/bundle/resources/model_serving_endpoints/basic/out.second-plan.direct.json b/acceptance/bundle/resources/model_serving_endpoints/basic/out.second-plan.direct.json index 4bf1bd4ddd..80fa8bb462 100644 --- a/acceptance/bundle/resources/model_serving_endpoints/basic/out.second-plan.direct.json +++ b/acceptance/bundle/resources/model_serving_endpoints/basic/out.second-plan.direct.json @@ -36,7 +36,7 @@ }, "name": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "[ENDPOINT_NAME_1]", "new": "[ENDPOINT_NAME_2]", "remote": "[ENDPOINT_NAME_1]" diff --git a/acceptance/bundle/resources/model_serving_endpoints/recreate/catalog-name/out.second-plan.direct.json b/acceptance/bundle/resources/model_serving_endpoints/recreate/catalog-name/out.second-plan.direct.json index c088210dfb..adabd2fdea 100644 --- a/acceptance/bundle/resources/model_serving_endpoints/recreate/catalog-name/out.second-plan.direct.json +++ b/acceptance/bundle/resources/model_serving_endpoints/recreate/catalog-name/out.second-plan.direct.json @@ -68,7 +68,7 @@ "changes": { "config.auto_capture_config.catalog_name": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "main", "new": "other_catalog", "remote": "main" diff --git a/acceptance/bundle/resources/model_serving_endpoints/recreate/name-change/out.second-plan.direct.json b/acceptance/bundle/resources/model_serving_endpoints/recreate/name-change/out.second-plan.direct.json index 8ff0c151c6..793d1f3376 100644 --- a/acceptance/bundle/resources/model_serving_endpoints/recreate/name-change/out.second-plan.direct.json +++ b/acceptance/bundle/resources/model_serving_endpoints/recreate/name-change/out.second-plan.direct.json @@ -58,7 +58,7 @@ "changes": { "name": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "[ORIGINAL_ENDPOINT_ID]", "new": "[NEW_ENDPOINT_ID]", "remote": "[ORIGINAL_ENDPOINT_ID]" diff --git a/acceptance/bundle/resources/model_serving_endpoints/recreate/route-optimized/out.second-plan.direct.json b/acceptance/bundle/resources/model_serving_endpoints/recreate/route-optimized/out.second-plan.direct.json index 41ccbd543d..2110d843df 100644 --- a/acceptance/bundle/resources/model_serving_endpoints/recreate/route-optimized/out.second-plan.direct.json +++ b/acceptance/bundle/resources/model_serving_endpoints/recreate/route-optimized/out.second-plan.direct.json @@ -51,7 +51,7 @@ "changes": { "route_optimized": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": false, "new": true, "remote": false diff --git a/acceptance/bundle/resources/model_serving_endpoints/recreate/schema-name/out.second-plan.direct.json b/acceptance/bundle/resources/model_serving_endpoints/recreate/schema-name/out.second-plan.direct.json index 8871726f08..349384d689 100644 --- a/acceptance/bundle/resources/model_serving_endpoints/recreate/schema-name/out.second-plan.direct.json +++ b/acceptance/bundle/resources/model_serving_endpoints/recreate/schema-name/out.second-plan.direct.json @@ -68,7 +68,7 @@ "changes": { "config.auto_capture_config.schema_name": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "default", "new": "other_schema", "remote": "default" diff --git a/acceptance/bundle/resources/model_serving_endpoints/recreate/table-prefix/out.second-plan.direct.json b/acceptance/bundle/resources/model_serving_endpoints/recreate/table-prefix/out.second-plan.direct.json index 332be30a1a..a8464e5833 100644 --- a/acceptance/bundle/resources/model_serving_endpoints/recreate/table-prefix/out.second-plan.direct.json +++ b/acceptance/bundle/resources/model_serving_endpoints/recreate/table-prefix/out.second-plan.direct.json @@ -68,7 +68,7 @@ "changes": { "config.auto_capture_config.table_name_prefix": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "my_table", "new": "other_table", "remote": "my_table" diff --git a/acceptance/bundle/resources/pipelines/recreate-keys/change-ingestion-definition/out.plan_recreate.direct.json b/acceptance/bundle/resources/pipelines/recreate-keys/change-ingestion-definition/out.plan_recreate.direct.json index 1f48e24871..f14155d8e7 100644 --- a/acceptance/bundle/resources/pipelines/recreate-keys/change-ingestion-definition/out.plan_recreate.direct.json +++ b/acceptance/bundle/resources/pipelines/recreate-keys/change-ingestion-definition/out.plan_recreate.direct.json @@ -65,7 +65,7 @@ "changes": { "ingestion_definition.connection_name": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "my_connection", "new": "my_new_connection", "remote": "my_connection" diff --git a/acceptance/bundle/resources/pipelines/recreate-keys/change-storage/out.plan_recreate.direct.json b/acceptance/bundle/resources/pipelines/recreate-keys/change-storage/out.plan_recreate.direct.json index 5938fb5b49..5999af588e 100644 --- a/acceptance/bundle/resources/pipelines/recreate-keys/change-storage/out.plan_recreate.direct.json +++ b/acceptance/bundle/resources/pipelines/recreate-keys/change-storage/out.plan_recreate.direct.json @@ -54,7 +54,7 @@ "changes": { "storage": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "dbfs:/pipelines/custom", "new": "dbfs:/pipelines/newcustom", "remote": "dbfs:/pipelines/custom" diff --git a/acceptance/bundle/resources/quality_monitors/change_assets_dir/out.plan.direct.json b/acceptance/bundle/resources/quality_monitors/change_assets_dir/out.plan.direct.json index 0eacfaf389..3c9be3a7c6 100644 --- a/acceptance/bundle/resources/quality_monitors/change_assets_dir/out.plan.direct.json +++ b/acceptance/bundle/resources/quality_monitors/change_assets_dir/out.plan.direct.json @@ -31,7 +31,7 @@ "changes": { "assets_dir": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "/Workspace/Users/[USERNAME]/monitor_assets_[UNIQUE_NAME]", "new": "/Workspace/Users/[USERNAME]/monitor_assets2_[UNIQUE_NAME]", "remote": "/Workspace/Users/[USERNAME]/monitor_assets_[UNIQUE_NAME]" diff --git a/acceptance/bundle/resources/quality_monitors/change_table_name/out.plan.direct.json b/acceptance/bundle/resources/quality_monitors/change_table_name/out.plan.direct.json index 785e60072b..3f93a267ce 100644 --- a/acceptance/bundle/resources/quality_monitors/change_table_name/out.plan.direct.json +++ b/acceptance/bundle/resources/quality_monitors/change_table_name/out.plan.direct.json @@ -31,7 +31,7 @@ "changes": { "table_name": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "main.qm_test_[UNIQUE_NAME].test_table", "new": "main.qm_test_[UNIQUE_NAME].test_table_2", "remote": "main.qm_test_[UNIQUE_NAME].test_table" diff --git a/acceptance/bundle/resources/secret_scopes/basic/out.plan2.direct.json b/acceptance/bundle/resources/secret_scopes/basic/out.plan2.direct.json index 9c2f065210..5841f1676b 100644 --- a/acceptance/bundle/resources/secret_scopes/basic/out.plan2.direct.json +++ b/acceptance/bundle/resources/secret_scopes/basic/out.plan2.direct.json @@ -19,7 +19,7 @@ "changes": { "scope": { "action": "recreate", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "test-scope-[UNIQUE_NAME]-1", "new": "test-scope-[UNIQUE_NAME]-2", "remote": "test-scope-[UNIQUE_NAME]-1" @@ -68,7 +68,7 @@ "changes": { "scope_name": { "action": "update_id", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "test-scope-[UNIQUE_NAME]-1", "new": "", "remote": "test-scope-[UNIQUE_NAME]-1" diff --git a/acceptance/bundle/resources/volumes/change-name/out.plan.direct.json b/acceptance/bundle/resources/volumes/change-name/out.plan.direct.json index 9f9742c2a1..98dfb8ff67 100644 --- a/acceptance/bundle/resources/volumes/change-name/out.plan.direct.json +++ b/acceptance/bundle/resources/volumes/change-name/out.plan.direct.json @@ -32,7 +32,7 @@ "changes": { "name": { "action": "update_id", - "reason": "field_triggers", + "reason": "builtin_rule", "old": "myvolume", "new": "mynewvolume", "remote": "myvolume" diff --git a/bundle/deployplan/plan.go b/bundle/deployplan/plan.go index 0ec18da261..b819b81686 100644 --- a/bundle/deployplan/plan.go +++ b/bundle/deployplan/plan.go @@ -91,7 +91,7 @@ const ( ReasonServerSideDefault = "server_side_default" ReasonAlias = "alias" ReasonRemoteAlreadySet = "remote_already_set" - ReasonResourceConfig = "resource_config" + ReasonBuiltinRule = "builtin_rule" ReasonConfigOnly = "config_only" ) diff --git a/bundle/direct/bundle_plan.go b/bundle/direct/bundle_plan.go index 9deeca4889..e7c59c527b 100644 --- a/bundle/direct/bundle_plan.go +++ b/bundle/direct/bundle_plan.go @@ -380,7 +380,7 @@ func addPerFieldActions(ctx context.Context, adapter *dresources.Adapter, change ch.Reason = deployplan.ReasonRemoteAlreadySet } else if action := getActionFromConfig(cfg, pathString); action != deployplan.Undefined { ch.Action = action - ch.Reason = deployplan.ReasonResourceConfig + ch.Reason = deployplan.ReasonBuiltinRule } else { ch.Action = deployplan.Update } From 1c4b333a2c8d8b10ba45b568b84602e83feaf640 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 16 Jan 2026 16:32:13 +0100 Subject: [PATCH 05/11] reorder, bring back comments --- bundle/direct/dresources/resources.yml | 85 ++++++++++++++++++-------- 1 file changed, 60 insertions(+), 25 deletions(-) diff --git a/bundle/direct/dresources/resources.yml b/bundle/direct/dresources/resources.yml index 7e2456e17b..5726cfd59f 100644 --- a/bundle/direct/dresources/resources.yml +++ b/bundle/direct/dresources/resources.yml @@ -8,22 +8,42 @@ # ignore_local_changes: if true, all local changes are ignored resources: - apps: + # jobs: no special config + + pipelines: recreate_on_changes: - - name + - storage + - ingestion_definition.connection_name + - ingestion_definition.ingestion_gateway_id - experiments: + # Recreate matches current behavior of Terraform. It is possible to rename without recreate + # but that would require dynamic select of the method during update since + # the ml.RenameModel needs to be called instead of ml.UpdateModel. + # + # We might reasonably choose to never fix this because this is a legacy resource. + # + # Allowing updates for tags requires dynamic selection of the method since + # tags can only be updated by calling ml.SetModelTag or ml.DeleteModelTag methods. + # + # Skip annotation matches the current behavior of Terraform where tags changes are showed + # in plan but are just ignored / not applied. Since this is a legacy resource we might + # reasonably choose to not fix it here as well. + models: recreate_on_changes: - - artifact_location + - name ignore_remote_changes: - tags - models: + # TF implementation: https://github.com/databricks/terraform-provider-databricks/blob/6c106e8e7052bb2726148d66309fd460ed444236/mlflow/resource_mlflow_experiment.go#L22 + # Tags updates are not supported by TF. This mirrors that behaviour. + experiments: recreate_on_changes: - - name + - artifact_location ignore_remote_changes: - tags + # TF implementation: https://github.com/databricks/terraform-provider-databricks/blob/6c106e8e7052bb2726148d66309fd460ed444236/mlflow/resource_mlflow_experiment.go#L22 + # description is immutable, can't be updated via API model_serving_endpoints: recreate_on_changes: - name @@ -33,29 +53,42 @@ resources: - config.auto_capture_config.table_name_prefix - route_optimized - pipelines: + # The name can technically be updated without recreate. We recreate for now though + # to match TF implementation. + registered_models: recreate_on_changes: - - storage - - ingestion_definition.connection_name - - ingestion_definition.ingestion_gateway_id + - name + - catalog_name + - schema_name + - storage_location quality_monitors: recreate_on_changes: - assets_dir - table_name - registered_models: + schemas: recreate_on_changes: - name + - catalog_name + - storage_root + + volumes: + recreate_on_changes: - catalog_name - schema_name - storage_location + - volume_type + update_id_on_changes: + - name - schemas: + # clusters: no special config + + # dashboards: no special config + + apps: recreate_on_changes: - name - - catalog_name - - storage_root secret_scopes: recreate_on_changes: @@ -64,17 +97,19 @@ resources: - backend_azure_keyvault - initial_manage_principal - volumes: - recreate_on_changes: - - catalog_name - - schema_name - - storage_location - - volume_type - update_id_on_changes: - - name - - # Permissions for secret scopes use ResourceSecretScopeAcls - # which needs UpdateWithID when scope_name changes + # Permissions for secret scopes use ResourceSecretScopeAcls. + # When scope name changes, we need UpdateWithID trigger. This is necessary so that subsequent + # DoRead operations use the correct ID and we do not end up with a persistent drift. secret_scopes.permissions: update_id_on_changes: - scope_name + + # alerts: no special config + + # sql_warehouses: no special config + + # database_instances: no special config + + # database_catalogs: no special config + + # synced_database_tables: no special config From e1c0d195271f3e13f842272ed196a546f5b6fd9e Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 16 Jan 2026 16:32:25 +0100 Subject: [PATCH 06/11] rm PLAN.md --- PLAN.md | 296 -------------------------------------------------------- 1 file changed, 296 deletions(-) delete mode 100644 PLAN.md diff --git a/PLAN.md b/PLAN.md deleted file mode 100644 index 4a4fc180fe..0000000000 --- a/PLAN.md +++ /dev/null @@ -1,296 +0,0 @@ -# Plan: Replace FieldTriggers with Declarative YAML Config - -## Overview - -Replace the programmatic `FieldTriggers()` method on each resource with a declarative YAML configuration file (`dresources/resources.yml`) that is embedded into the binary. - -## Current State - -Currently, each resource optionally implements: -```go -func (*ResourceVolume) FieldTriggers() map[string]deployplan.ActionType { - return map[string]deployplan.ActionType{ - "catalog_name": deployplan.Recreate, - "name": deployplan.UpdateWithID, - } -} -``` - -Resources with FieldTriggers: -- `volume.go`: catalog_name, schema_name, storage_location, volume_type → Recreate; name → UpdateWithID -- `schema.go`: name, catalog_name, storage_root → Recreate -- `pipeline.go`: storage, ingestion_definition.connection_name, ingestion_definition.ingestion_gateway_id → Recreate -- `model_serving_endpoint.go`: name, description, route_optimized, auto_capture configs → Recreate -- `secret_scope_acls.go`: scope_name → UpdateWithID -- `experiment.go`: name, artifact_location → Recreate -- `model.go`: name → Recreate -- `quality_monitor.go`: output_schema_name, assets_dir → Recreate -- `registered_model.go`: catalog_name, schema_name, name → Recreate -- `app.go`: name → Recreate -- `secret_scope.go`: name, backend_type → Recreate - -## Proposed YAML Schema - -```yaml -# dresources/resources.yml -resources: - jobs: - # Example: ignore server-set fields - ignore_remote_changes: - - effective_budget_policy_id - - "tags[key='server_tag']" - - volumes: - recreate_on_changes: - - catalog_name - - schema_name - - storage_location - - volume_type - update_id_on_changes: - - name - - pipelines: - recreate_on_changes: - - storage - - ingestion_definition.connection_name - - ingestion_definition.ingestion_gateway_id - - schemas: - recreate_on_changes: - - name - - catalog_name - - storage_root - - # Permissions and grants are top-level entries - permissions: - # config for all permissions resources - - grants: - # config for all grants resources -``` - -## Pattern Syntax - -Patterns use `libs/structs/structpath` syntax: - -- **Dot notation**: `foo.bar` - nested fields -- **Bracket notation**: `['field-name']` - fields with special characters -- **Array indices**: `[0]`, `[1]` - specific array elements -- **Wildcards**: `.*` (any field) and `[*]` (any array element) -- **Key-value matching**: `[key='value']` - match array/map elements by key - -Examples: -- `name` - simple field -- `config.auto_capture_config.catalog_name` - nested field -- `tags[key='server_tag']` - specific tag by key -- `tasks[*].task_key` - all task keys in tasks array - -## Implementation Steps - -### Step 1: Define Go Types for the Config - -Create `bundle/direct/dresources/config.go`: - -```go -package dresources - -import "github.com/databricks/cli/libs/structs/structpath" - -// ResourceLifecycleConfig defines lifecycle behavior for a resource type. -type ResourceLifecycleConfig struct { - // IgnoreRemoteChanges: fields where remote changes are ignored (output-only, policy-set) - IgnoreRemoteChanges []*structpath.PathNode `yaml:"ignore_remote_changes,omitempty"` - - // IgnoreLocalChanges: local config changes will be ignored (read-only resource) - IgnoreLocalChanges bool `yaml:"ignore_local_changes,omitempty"` - - // RecreateOnChanges: field changes trigger delete + create - RecreateOnChanges []*structpath.PathNode `yaml:"recreate_on_changes,omitempty"` - - // UpdateIDOnChanges: field changes trigger UpdateWithID - UpdateIDOnChanges []*structpath.PathNode `yaml:"update_id_on_changes,omitempty"` -} - -// Config is the root configuration structure. -type Config struct { - Resources map[string]ResourceLifecycleConfig `yaml:"resources"` -} -``` - -Note: `structpath.PathNode` implements `yaml.Marshaler` and `yaml.Unmarshaler` interfaces, -enabling automatic parsing during config load with early validation of pattern syntax. - -### Step 2: Create the YAML Config File - -Create `bundle/direct/dresources/resources.yml` with all current FieldTriggers data migrated to the new format. - -### Step 3: Embed and Load the Config - -Create `bundle/direct/dresources/config_loader.go`: - -```go -package dresources - -import ( - _ "embed" - "sync" - "gopkg.in/yaml.v3" -) - -//go:embed resources.yml -var resourcesYAML []byte - -var ( - configOnce sync.Once - globalConfig *Config - configErr error -) - -func LoadConfig() (*Config, error) { - configOnce.Do(func() { - globalConfig = &Config{} - configErr = yaml.Unmarshal(resourcesYAML, globalConfig) - }) - return globalConfig, configErr -} - -func GetResourceConfig(resourceType string) *ResourceLifecycleConfig { - cfg, err := LoadConfig() - if err != nil || cfg == nil { - return nil - } - if rc, ok := cfg.Resources[resourceType]; ok { - return &rc - } - return nil -} -``` - -### Step 4: Modify Adapter to Use Config Instead of FieldTriggers - -Update `adapter.go`: - -1. Add a field to store the resource config: - ```go - type Adapter struct { - // ... existing fields ... - resourceConfig *ResourceLifecycleConfig - } - ``` - -2. In `NewAdapter`, load from config instead of calling FieldTriggers(): - ```go - // Replace FieldTriggers loading with: - adapter.resourceConfig = GetResourceConfig(resourceType) - adapter.fieldTriggers = adapter.buildFieldTriggersFromConfig() - ``` - -3. Add method to convert config to fieldTriggers map: - ```go - func (a *Adapter) buildFieldTriggersFromConfig() map[string]deployplan.ActionType { - if a.resourceConfig == nil { - return map[string]deployplan.ActionType{} - } - result := make(map[string]deployplan.ActionType) - for _, field := range a.resourceConfig.RecreateOnChanges { - result[field] = deployplan.Recreate - } - for _, field := range a.resourceConfig.UpdateIDOnChanges { - result[field] = deployplan.UpdateWithID - } - return result - } - ``` - -### Step 5: Add Pattern Matching for `ignore_remote_changes` - -Add pattern matching function to `libs/structs/structpath/path.go`: - -```go -// MatchPattern checks if a path matches a pattern with wildcards. -// Pattern can contain: -// - .* to match any single field -// - [*] to match any array index -// - [key='value'] to match specific key-value pairs -func MatchPattern(path, pattern string) bool { - // Implementation using structpath.Parse for both path and pattern, - // then comparing node by node with wildcard support -} -``` - -### Step 6: Update `addPerFieldActions` for New Config Options - -Modify `bundle_plan.go:addPerFieldActions` to handle: -- `ignore_remote_changes`: Skip action if field matches any pattern (using MatchPattern) -- `ignore_local_changes`: Skip all local changes for the resource - -### Step 7: Remove FieldTriggers Methods from Resources - -Remove `FieldTriggers()` method from all resource files: -- volume.go -- schema.go -- pipeline.go -- model_serving_endpoint.go -- secret_scope_acls.go -- experiment.go -- model.go -- quality_monitor.go -- registered_model.go -- app.go -- secret_scope.go - -### Step 8: Update IResource Interface - -Remove FieldTriggers from the IResource interface in `adapter.go`. - -### Step 9: Update Tests - -1. Update `TestFieldTriggers` in `all_test.go` to validate config loading -2. Update `TestFieldTriggersNoUpdateWhenNotImplemented` to work with new config -3. Add tests for new config features (ignore_remote_changes, etc.) -4. Add tests for pattern matching in structpath - -### Step 10: Update Validation - -The adapter validation currently checks: -- `DoUpdateWithID` is implemented if FieldTriggers has `update_with_id` -- `DoUpdateWithID` has trigger if implemented - -Update this to validate against the YAML config instead. - -## File Changes Summary - -| File | Action | -|------|--------| -| `libs/structs/structpath/path.go` | Modify - Add MatchPattern function | -| `libs/structs/structpath/path_test.go` | Modify - Add MatchPattern tests | -| `dresources/config.go` | New - Go types for config | -| `dresources/resources.yml` | New - YAML config file | -| `dresources/adapter.go` | Modify - Remove FieldTriggers loading, add config-based approach | -| `dresources/volume.go` | Modify - Remove FieldTriggers() | -| `dresources/schema.go` | Modify - Remove FieldTriggers() | -| `dresources/pipeline.go` | Modify - Remove FieldTriggers() | -| `dresources/model_serving_endpoint.go` | Modify - Remove FieldTriggers() | -| `dresources/secret_scope_acls.go` | Modify - Remove FieldTriggers() | -| `dresources/experiment.go` | Modify - Remove FieldTriggers() | -| `dresources/model.go` | Modify - Remove FieldTriggers() | -| `dresources/quality_monitor.go` | Modify - Remove FieldTriggers() | -| `dresources/registered_model.go` | Modify - Remove FieldTriggers() | -| `dresources/app.go` | Modify - Remove FieldTriggers() | -| `dresources/secret_scope.go` | Modify - Remove FieldTriggers() | -| `dresources/all_test.go` | Modify - Update tests | -| `direct/bundle_plan.go` | Modify - Handle new config options | - -## Migration Strategy - -1. First implement config loading alongside existing FieldTriggers -2. Add new config features (ignore_remote_changes, etc.) -3. Migrate all existing FieldTriggers data to YAML -4. Remove FieldTriggers methods and update interface -5. Update tests - -## Design Decisions - -1. **Permissions and grants**: Top-level entries `permissions` and `grants` in the YAML config (not nested under parent resources) -2. **Pattern syntax**: Uses `libs/structs/structpath` syntax with wildcards (`.*`, `[*]`) and key-value matching (`[key='value']`) -3. **PathNode in config**: Config uses `[]*structpath.PathNode` directly instead of `[]string` for early validation and parse-once semantics From 982c73eb55dd2de12c7483399c85810657d5c25f Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 16 Jan 2026 16:33:49 +0100 Subject: [PATCH 07/11] put ignore-remote-changes first --- bundle/direct/bundle_plan.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bundle/direct/bundle_plan.go b/bundle/direct/bundle_plan.go index e7c59c527b..e5d0277ec1 100644 --- a/bundle/direct/bundle_plan.go +++ b/bundle/direct/bundle_plan.go @@ -400,6 +400,11 @@ func getActionFromConfig(cfg *dresources.ResourceLifecycleConfig, pathString str if cfg == nil { return deployplan.Undefined } + for _, p := range cfg.IgnoreRemoteChanges { + if structpath.HasPrefix(pathString, p.String()) { + return deployplan.Skip + } + } for _, p := range cfg.RecreateOnChanges { if structpath.HasPrefix(pathString, p.String()) { return deployplan.Recreate @@ -410,11 +415,6 @@ func getActionFromConfig(cfg *dresources.ResourceLifecycleConfig, pathString str return deployplan.UpdateWithID } } - for _, p := range cfg.IgnoreRemoteChanges { - if structpath.HasPrefix(pathString, p.String()) { - return deployplan.Skip - } - } return deployplan.Undefined } From 6d6ebf4bf440f538ef7cef108ec3616cb07836c9 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 16 Jan 2026 16:37:33 +0100 Subject: [PATCH 08/11] reformat --- bundle/direct/dresources/resources.yml | 33 ++++++++++++-------------- 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/bundle/direct/dresources/resources.yml b/bundle/direct/dresources/resources.yml index 5726cfd59f..fd57e70e9e 100644 --- a/bundle/direct/dresources/resources.yml +++ b/bundle/direct/dresources/resources.yml @@ -16,47 +16,44 @@ resources: - ingestion_definition.connection_name - ingestion_definition.ingestion_gateway_id - # Recreate matches current behavior of Terraform. It is possible to rename without recreate - # but that would require dynamic select of the method during update since - # the ml.RenameModel needs to be called instead of ml.UpdateModel. - # - # We might reasonably choose to never fix this because this is a legacy resource. - # - # Allowing updates for tags requires dynamic selection of the method since - # tags can only be updated by calling ml.SetModelTag or ml.DeleteModelTag methods. - # - # Skip annotation matches the current behavior of Terraform where tags changes are showed - # in plan but are just ignored / not applied. Since this is a legacy resource we might - # reasonably choose to not fix it here as well. models: recreate_on_changes: + # Recreate matches current behavior of Terraform. It is possible to rename without recreate + # but that would require dynamic select of the method during update since + # the ml.RenameModel needs to be called instead of ml.UpdateModel. + # We might reasonably choose to never fix this because this is a legacy resource. - name ignore_remote_changes: + # Allowing updates for tags requires dynamic selection of the method since + # tags can only be updated by calling ml.SetModelTag or ml.DeleteModelTag methods. + # Skip annotation matches the current behavior of Terraform where tags changes are showed + # in plan but are just ignored / not applied. Since this is a legacy resource we might + # reasonably choose to not fix it here as well. - tags # TF implementation: https://github.com/databricks/terraform-provider-databricks/blob/6c106e8e7052bb2726148d66309fd460ed444236/mlflow/resource_mlflow_experiment.go#L22 - # Tags updates are not supported by TF. This mirrors that behaviour. experiments: recreate_on_changes: - artifact_location ignore_remote_changes: + # Tags updates are not supported by TF. This mirrors that behaviour. - tags # TF implementation: https://github.com/databricks/terraform-provider-databricks/blob/6c106e8e7052bb2726148d66309fd460ed444236/mlflow/resource_mlflow_experiment.go#L22 - # description is immutable, can't be updated via API model_serving_endpoints: recreate_on_changes: - name + # description is immutable, can't be updated via API - description - config.auto_capture_config.catalog_name - config.auto_capture_config.schema_name - config.auto_capture_config.table_name_prefix - route_optimized - # The name can technically be updated without recreate. We recreate for now though - # to match TF implementation. registered_models: recreate_on_changes: + # The name can technically be updated without recreate. We recreate for now though + # to match TF implementation. - name - catalog_name - schema_name @@ -98,10 +95,10 @@ resources: - initial_manage_principal # Permissions for secret scopes use ResourceSecretScopeAcls. - # When scope name changes, we need UpdateWithID trigger. This is necessary so that subsequent - # DoRead operations use the correct ID and we do not end up with a persistent drift. secret_scopes.permissions: update_id_on_changes: + # When scope name changes, we need UpdateWithID trigger. This is necessary so that subsequent + # DoRead operations use the correct ID and we do not end up with a persistent drift. - scope_name # alerts: no special config From 90566a2bdf5be9ab3d7e9d90221c17078496652f Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 16 Jan 2026 17:02:43 +0100 Subject: [PATCH 09/11] Change ignore_local_changes from bool to path array MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Update ResourceLifecycleConfig.IgnoreLocalChanges to be a list of field patterns instead of a boolean. Add models.tags to both ignore_remote_changes and ignore_local_changes since tags cannot be updated via the normal API. Also reorder resources.yml sections to match Resources struct order and move comments closer to the fields they describe. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- bundle/direct/dresources/config.go | 4 ++-- bundle/direct/dresources/resources.yml | 14 ++++++++------ 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/bundle/direct/dresources/config.go b/bundle/direct/dresources/config.go index e825dcc458..12a972f4ca 100644 --- a/bundle/direct/dresources/config.go +++ b/bundle/direct/dresources/config.go @@ -13,8 +13,8 @@ type ResourceLifecycleConfig struct { // IgnoreRemoteChanges: field patterns where remote changes are ignored (output-only, policy-set). IgnoreRemoteChanges []*structpath.PathNode `yaml:"ignore_remote_changes,omitempty"` - // IgnoreLocalChanges: if true, local config changes will be ignored (read-only resource). - IgnoreLocalChanges bool `yaml:"ignore_local_changes,omitempty"` + // IgnoreLocalChanges: field patterns where local changes are ignored (can't be updated via API). + IgnoreLocalChanges []*structpath.PathNode `yaml:"ignore_local_changes,omitempty"` // RecreateOnChanges: field patterns that trigger delete + create when changed. RecreateOnChanges []*structpath.PathNode `yaml:"recreate_on_changes,omitempty"` diff --git a/bundle/direct/dresources/resources.yml b/bundle/direct/dresources/resources.yml index fd57e70e9e..fa67266054 100644 --- a/bundle/direct/dresources/resources.yml +++ b/bundle/direct/dresources/resources.yml @@ -5,7 +5,7 @@ # recreate_on_changes: fields that trigger delete + create # update_id_on_changes: fields that trigger UpdateWithID (ID may change) # ignore_remote_changes: fields where remote changes are ignored -# ignore_local_changes: if true, all local changes are ignored +# ignore_local_changes: fields where local changes are ignored (can't be updated via API) resources: # jobs: no special config @@ -23,12 +23,14 @@ resources: # the ml.RenameModel needs to be called instead of ml.UpdateModel. # We might reasonably choose to never fix this because this is a legacy resource. - name + # Allowing updates for tags requires dynamic selection of the method since + # tags can only be updated by calling ml.SetModelTag or ml.DeleteModelTag methods. + # Skip annotation matches the current behavior of Terraform where tags changes are showed + # in plan but are just ignored / not applied. Since this is a legacy resource we might + # reasonably choose to not fix it here as well. ignore_remote_changes: - # Allowing updates for tags requires dynamic selection of the method since - # tags can only be updated by calling ml.SetModelTag or ml.DeleteModelTag methods. - # Skip annotation matches the current behavior of Terraform where tags changes are showed - # in plan but are just ignored / not applied. Since this is a legacy resource we might - # reasonably choose to not fix it here as well. + - tags + ignore_local_changes: - tags # TF implementation: https://github.com/databricks/terraform-provider-databricks/blob/6c106e8e7052bb2726148d66309fd460ed444236/mlflow/resource_mlflow_experiment.go#L22 From cd748d2af26b03db3876b4aa250dfd512ddd8ba8 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 16 Jan 2026 17:06:50 +0100 Subject: [PATCH 10/11] Replace LoadConfig with MustLoadConfig MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Since resources.yml is embedded at compile time, parsing errors indicate a bug in the code rather than a runtime condition. Use panic instead of returning an error. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- bundle/direct/dresources/config.go | 17 ++++++++--------- bundle/direct/dresources/config_test.go | 5 ++--- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/bundle/direct/dresources/config.go b/bundle/direct/dresources/config.go index 12a972f4ca..784a0cd965 100644 --- a/bundle/direct/dresources/config.go +++ b/bundle/direct/dresources/config.go @@ -34,28 +34,27 @@ var resourcesYAML []byte var ( configOnce sync.Once globalConfig *Config - configErr error ) -// LoadConfig loads and parses the embedded resources.yml configuration. +// MustLoadConfig loads and parses the embedded resources.yml configuration. // The config is loaded once and cached for subsequent calls. -func LoadConfig() (*Config, error) { +// Panics if the embedded YAML is invalid. +func MustLoadConfig() *Config { configOnce.Do(func() { globalConfig = &Config{ Resources: nil, } - configErr = yaml.Unmarshal(resourcesYAML, globalConfig) + if err := yaml.Unmarshal(resourcesYAML, globalConfig); err != nil { + panic(err) + } }) - return globalConfig, configErr + return globalConfig } // GetResourceConfig returns the lifecycle config for a given resource type. // Returns nil if the resource type has no configuration. func GetResourceConfig(resourceType string) *ResourceLifecycleConfig { - cfg, err := LoadConfig() - if err != nil || cfg == nil { - return nil - } + cfg := MustLoadConfig() if rc, ok := cfg.Resources[resourceType]; ok { return &rc } diff --git a/bundle/direct/dresources/config_test.go b/bundle/direct/dresources/config_test.go index 332bf68c35..20cd3cdf44 100644 --- a/bundle/direct/dresources/config_test.go +++ b/bundle/direct/dresources/config_test.go @@ -7,9 +7,8 @@ import ( "github.com/stretchr/testify/require" ) -func TestLoadConfig(t *testing.T) { - cfg, err := LoadConfig() - require.NoError(t, err) +func TestMustLoadConfig(t *testing.T) { + cfg := MustLoadConfig() require.NotNil(t, cfg) require.NotNil(t, cfg.Resources) From bba0a1e98e2081c7b84793d301ffefdfc6c42ad9 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 16 Jan 2026 17:07:56 +0100 Subject: [PATCH 11/11] Simplify config_test.go MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- bundle/direct/dresources/config_test.go | 45 ++----------------------- 1 file changed, 3 insertions(+), 42 deletions(-) diff --git a/bundle/direct/dresources/config_test.go b/bundle/direct/dresources/config_test.go index 20cd3cdf44..8d3845dd5d 100644 --- a/bundle/direct/dresources/config_test.go +++ b/bundle/direct/dresources/config_test.go @@ -4,53 +4,14 @@ import ( "testing" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) func TestMustLoadConfig(t *testing.T) { cfg := MustLoadConfig() - require.NotNil(t, cfg) - require.NotNil(t, cfg.Resources) - - // Verify some known resource configurations - volumes := cfg.Resources["volumes"] - assert.Len(t, volumes.RecreateOnChanges, 4) - assert.Len(t, volumes.UpdateIDOnChanges, 1) - assert.Equal(t, "name", volumes.UpdateIDOnChanges[0].String()) - - schemas := cfg.Resources["schemas"] - assert.Len(t, schemas.RecreateOnChanges, 3) - - // Verify nested paths work - endpoints := cfg.Resources["model_serving_endpoints"] - found := false - for _, p := range endpoints.RecreateOnChanges { - if p.String() == "config.auto_capture_config.catalog_name" { - found = true - break - } - } - assert.True(t, found, "should find nested path config.auto_capture_config.catalog_name") + assert.NotEmpty(t, cfg.Resources) } func TestGetResourceConfig(t *testing.T) { - // Existing resource - cfg := GetResourceConfig("volumes") - require.NotNil(t, cfg) - assert.Len(t, cfg.RecreateOnChanges, 4) - - // Non-existing resource returns nil - cfg = GetResourceConfig("nonexistent") - assert.Nil(t, cfg) - - // Jobs have no config in resources.yml - cfg = GetResourceConfig("jobs") - assert.Nil(t, cfg) -} - -func TestConfigIgnoreRemoteChanges(t *testing.T) { - cfg := GetResourceConfig("experiments") - require.NotNil(t, cfg) - require.Len(t, cfg.IgnoreRemoteChanges, 1) - assert.Equal(t, "tags", cfg.IgnoreRemoteChanges[0].String()) + assert.NotNil(t, GetResourceConfig("volumes")) + assert.Nil(t, GetResourceConfig("nonexistent")) }