From 2ddb691f9c3bdc8e9e200073d8578a6fd6c65523 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 20 Aug 2025 15:14:09 +0200 Subject: [PATCH 01/19] Added support for lifecycle prevent_destroy option --- .../lifecycle/prevent_deploy/databricks.yml | 16 +++++++ .../lifecycle/prevent_deploy/out.test.toml | 5 +++ .../lifecycle/prevent_deploy/output.txt | 44 +++++++++++++++++++ .../bundle/lifecycle/prevent_deploy/script | 9 ++++ .../lifecycle/prevent_deploy/test-notebook.py | 3 ++ acceptance/bundle/lifecycle/test.toml | 2 + bundle/config/resources/apps.go | 1 + bundle/config/resources/clusters.go | 1 + bundle/config/resources/dashboard.go | 1 + bundle/config/resources/database_instance.go | 1 + bundle/config/resources/job.go | 1 + bundle/config/resources/lifecycle.go | 5 +++ bundle/config/resources/mlflow_experiment.go | 1 + bundle/config/resources/mlflow_model.go | 1 + .../resources/model_serving_endpoint.go | 1 + bundle/config/resources/pipeline.go | 1 + bundle/config/resources/quality_monitor.go | 1 + bundle/config/resources/registered_model.go | 1 + bundle/config/resources/schema.go | 1 + bundle/config/resources/secret_scope.go | 2 + bundle/config/resources/sql_warehouses.go | 1 + bundle/config/resources/volume.go | 1 + bundle/deploy/terraform/tfdyn/convert_app.go | 5 +++ .../terraform/tfdyn/convert_app_test.go | 33 ++++++++++++++ .../deploy/terraform/tfdyn/convert_cluster.go | 5 +++ .../terraform/tfdyn/convert_cluster_test.go | 32 ++++++++++++++ .../terraform/tfdyn/convert_dashboard.go | 5 +++ .../terraform/tfdyn/convert_dashboard_test.go | 31 +++++++++++++ .../tfdyn/convert_database_instance.go | 5 +++ .../tfdyn/convert_database_instance_test.go | 30 +++++++++++++ .../terraform/tfdyn/convert_experiment.go | 5 +++ .../tfdyn/convert_experiment_test.go | 27 ++++++++++++ bundle/deploy/terraform/tfdyn/convert_job.go | 5 +++ .../terraform/tfdyn/convert_job_test.go | 27 ++++++++++++ .../deploy/terraform/tfdyn/convert_model.go | 5 +++ .../tfdyn/convert_model_serving_endpoint.go | 5 +++ .../convert_model_serving_endpoint_test.go | 27 ++++++++++++ .../terraform/tfdyn/convert_model_test.go | 27 ++++++++++++ .../terraform/tfdyn/convert_pipeline.go | 5 +++ .../terraform/tfdyn/convert_pipeline_test.go | 27 ++++++++++++ .../tfdyn/convert_quality_monitor.go | 5 +++ .../tfdyn/convert_quality_monitor_test.go | 29 ++++++++++++ .../tfdyn/convert_registered_model.go | 5 +++ .../tfdyn/convert_registered_model_test.go | 27 ++++++++++++ .../deploy/terraform/tfdyn/convert_schema.go | 5 +++ .../terraform/tfdyn/convert_schema_test.go | 28 ++++++++++++ .../terraform/tfdyn/convert_secret_scope.go | 6 +++ .../terraform/tfdyn/convert_sql_warehouse.go | 5 +++ .../tfdyn/convert_sql_warehouse_test.go | 27 ++++++++++++ .../deploy/terraform/tfdyn/convert_volume.go | 5 +++ .../terraform/tfdyn/convert_volume_test.go | 27 ++++++++++++ bundle/deploy/terraform/tfdyn/lifecycle.go | 20 +++++++++ 52 files changed, 595 insertions(+) create mode 100644 acceptance/bundle/lifecycle/prevent_deploy/databricks.yml create mode 100644 acceptance/bundle/lifecycle/prevent_deploy/out.test.toml create mode 100644 acceptance/bundle/lifecycle/prevent_deploy/output.txt create mode 100644 acceptance/bundle/lifecycle/prevent_deploy/script create mode 100644 acceptance/bundle/lifecycle/prevent_deploy/test-notebook.py create mode 100644 acceptance/bundle/lifecycle/test.toml create mode 100644 bundle/config/resources/lifecycle.go create mode 100644 bundle/deploy/terraform/tfdyn/lifecycle.go diff --git
a/acceptance/bundle/lifecycle/prevent_deploy/databricks.yml b/acceptance/bundle/lifecycle/prevent_deploy/databricks.yml new file mode 100644 index 0000000000..1e5f1a97fe --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent_deploy/databricks.yml @@ -0,0 +1,16 @@ +bundle: + name: prevent-deploy + +job: &job_base + resources: + jobs: + my_job: + name: "test-job" + tasks: + - task_key: "test-task" + notebook_task: + notebook_path: "./test-notebook.py" + lifecycle: + prevent_destroy: true + +<<: *job_base diff --git a/acceptance/bundle/lifecycle/prevent_deploy/out.test.toml b/acceptance/bundle/lifecycle/prevent_deploy/out.test.toml new file mode 100644 index 0000000000..8f3575be7b --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent_deploy/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"] diff --git a/acceptance/bundle/lifecycle/prevent_deploy/output.txt b/acceptance/bundle/lifecycle/prevent_deploy/output.txt new file mode 100644 index 0000000000..aac3177b20 --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent_deploy/output.txt @@ -0,0 +1,44 @@ + +>>> [CLI] bundle validate +Name: prevent-deploy +Target: default +Workspace: + User: [USERNAME] + Path: /Workspace/Users/[USERNAME]/.bundle/prevent-deploy/default + +Validation OK! + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-deploy/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> musterr [CLI] bundle destroy --auto-approve +Error: exit status 1 + +Error: Instance cannot be destroyed + + on bundle.tf.json line 15, in resource.databricks_job: + 15: "my_job": { + +Resource databricks_job.my_job has lifecycle.prevent_destroy set, but the +plan calls for this resource to be destroyed. To avoid this error and +continue with the plan, either disable lifecycle.prevent_destroy or reduce +the scope of the plan using the -target flag. + + + +Exit code (musterr): 1 + +>>> update_file.py databricks.yml <<: *job_base + +>>> musterr [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-deploy/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
+ +Unexpected success + +Exit code: 1 diff --git a/acceptance/bundle/lifecycle/prevent_deploy/script b/acceptance/bundle/lifecycle/prevent_deploy/script new file mode 100644 index 0000000000..0897fe3a69 --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent_deploy/script @@ -0,0 +1,9 @@ +trace $CLI bundle validate + +trace $CLI bundle deploy + +trace musterr $CLI bundle destroy --auto-approve + +# Removing the job key forces DABs to destroy the job +trace update_file.py databricks.yml '<<: *job_base' '' +trace musterr $CLI bundle deploy diff --git a/acceptance/bundle/lifecycle/prevent_deploy/test-notebook.py b/acceptance/bundle/lifecycle/prevent_deploy/test-notebook.py new file mode 100644 index 0000000000..24dc150ffb --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent_deploy/test-notebook.py @@ -0,0 +1,3 @@ +# Databricks notebook source + +print("Hello, World!") diff --git a/acceptance/bundle/lifecycle/test.toml b/acceptance/bundle/lifecycle/test.toml new file mode 100644 index 0000000000..7d36fb9dc1 --- /dev/null +++ b/acceptance/bundle/lifecycle/test.toml @@ -0,0 +1,2 @@ +Local = true +Cloud = false diff --git a/bundle/config/resources/apps.go b/bundle/config/resources/apps.go index da4f2fafd6..688116375d 100644 --- a/bundle/config/resources/apps.go +++ b/bundle/config/resources/apps.go @@ -42,6 +42,7 @@ type App struct { URL string `json:"url,omitempty" bundle:"internal"` apps.App + Lifecycle Lifecycle `json:"lifecycle"` } func (a *App) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/clusters.go b/bundle/config/resources/clusters.go index ed94bb2e37..36403c6bef 100644 --- a/bundle/config/resources/clusters.go +++ b/bundle/config/resources/clusters.go @@ -29,6 +29,7 @@ type Cluster struct { URL string `json:"url,omitempty" bundle:"internal"` compute.ClusterSpec + Lifecycle Lifecycle `json:"lifecycle"` } func (s *Cluster) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/dashboard.go b/bundle/config/resources/dashboard.go index 0c0d4c86fc..822ceb9cb2 100644 --- a/bundle/config/resources/dashboard.go +++ b/bundle/config/resources/dashboard.go @@ -51,6 +51,7 @@ type Dashboard struct { URL string `json:"url,omitempty" bundle:"internal"` DashboardConfig + Lifecycle Lifecycle `json:"lifecycle"` // FilePath points to the local `.lvdash.json` file containing the dashboard definition. // This is inlined into serialized_dashboard during deployment.
The file_path is kept around diff --git a/bundle/config/resources/database_instance.go b/bundle/config/resources/database_instance.go index 1421e71a6e..977ca514d9 100644 --- a/bundle/config/resources/database_instance.go +++ b/bundle/config/resources/database_instance.go @@ -29,6 +29,7 @@ type DatabaseInstance struct { ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` database.DatabaseInstance + Lifecycle Lifecycle `json:"lifecycle"` } func (d *DatabaseInstance) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) { diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index f1ad5288ce..5d2bc29b9b 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -30,6 +30,7 @@ type Job struct { URL string `json:"url,omitempty" bundle:"internal"` jobs.JobSettings + Lifecycle Lifecycle `json:"lifecycle"` } func (j *Job) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/lifecycle.go b/bundle/config/resources/lifecycle.go new file mode 100644 index 0000000000..055b4d4d36 --- /dev/null +++ b/bundle/config/resources/lifecycle.go @@ -0,0 +1,5 @@ +package resources + +type Lifecycle struct { + PreventDestroy bool `json:"prevent_destroy,omitempty"` +} diff --git a/bundle/config/resources/mlflow_experiment.go b/bundle/config/resources/mlflow_experiment.go index 95eeccdd5b..aedfebc34e 100644 --- a/bundle/config/resources/mlflow_experiment.go +++ b/bundle/config/resources/mlflow_experiment.go @@ -29,6 +29,7 @@ type MlflowExperiment struct { URL string `json:"url,omitempty" bundle:"internal"` ml.Experiment + Lifecycle Lifecycle `json:"lifecycle"` } func (s *MlflowExperiment) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/mlflow_model.go b/bundle/config/resources/mlflow_model.go index 109878f9eb..40ca73f118 100644 --- a/bundle/config/resources/mlflow_model.go +++ b/bundle/config/resources/mlflow_model.go @@ -29,6 +29,7 @@ type MlflowModel struct { URL string `json:"url,omitempty" bundle:"internal"` ml.CreateModelRequest + Lifecycle Lifecycle `json:"lifecycle"` } func (s *MlflowModel) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/model_serving_endpoint.go b/bundle/config/resources/model_serving_endpoint.go index 93a5e01465..57f4f33aa8 100644 --- a/bundle/config/resources/model_serving_endpoint.go +++ b/bundle/config/resources/model_serving_endpoint.go @@ -26,6 +26,7 @@ type ModelServingEndpoint struct { // This represents the input args for terraform, and will get converted // to a HCL representation for CRUD serving.CreateServingEndpoint + Lifecycle Lifecycle `json:"lifecycle"` // This represents the id (ie serving_endpoint_id) that can be used // as a reference in other resources. This value is returned by terraform. 
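For orientation, the change repeated across these resource structs reduces to a single pattern: each resource keeps its embedded SDK payload and gains a Lifecycle field serialized under the lifecycle key. A condensed sketch of that pattern, using Job as the example and eliding the unrelated bundle-internal fields:

    package resources

    import "github.com/databricks/databricks-sdk-go/service/jobs"

    // Lifecycle carries deployment lifecycle options for a resource;
    // prevent_destroy is the only option introduced by this patch series.
    type Lifecycle struct {
            PreventDestroy bool `json:"prevent_destroy,omitempty"`
    }

    type Job struct {
            // ID, URL, permissions, modified_status, etc. elided.
            jobs.JobSettings
            Lifecycle Lifecycle `json:"lifecycle"`
    }

In bundle configuration this surfaces as a lifecycle block on the resource (lifecycle: prevent_destroy: true), as shown in the acceptance test's databricks.yml above.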
diff --git a/bundle/config/resources/pipeline.go b/bundle/config/resources/pipeline.go index 9aa7ebb405..954ddb6877 100644 --- a/bundle/config/resources/pipeline.go +++ b/bundle/config/resources/pipeline.go @@ -29,6 +29,7 @@ type Pipeline struct { URL string `json:"url,omitempty" bundle:"internal"` pipelines.CreatePipeline + Lifecycle Lifecycle `json:"lifecycle"` } func (p *Pipeline) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/quality_monitor.go b/bundle/config/resources/quality_monitor.go index 739d498990..9e46055cae 100644 --- a/bundle/config/resources/quality_monitor.go +++ b/bundle/config/resources/quality_monitor.go @@ -21,6 +21,7 @@ type QualityMonitor struct { // This struct defines the creation payload for a monitor. catalog.CreateMonitor + Lifecycle Lifecycle `json:"lifecycle"` } func (s *QualityMonitor) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/registered_model.go b/bundle/config/resources/registered_model.go index f6060c6434..bd34d3958f 100644 --- a/bundle/config/resources/registered_model.go +++ b/bundle/config/resources/registered_model.go @@ -24,6 +24,7 @@ type RegisteredModel struct { // This represents the input args for terraform, and will get converted // to a HCL representation for CRUD catalog.CreateRegisteredModelRequest + Lifecycle Lifecycle `json:"lifecycle"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` URL string `json:"url,omitempty" bundle:"internal"` diff --git a/bundle/config/resources/schema.go b/bundle/config/resources/schema.go index a8100e5fb2..2a3538e60b 100644 --- a/bundle/config/resources/schema.go +++ b/bundle/config/resources/schema.go @@ -23,6 +23,7 @@ type Schema struct { ID string `json:"id,omitempty" bundle:"readonly"` catalog.CreateSchema + Lifecycle Lifecycle `json:"lifecycle"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` URL string `json:"url,omitempty" bundle:"internal"` diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go index d46af6d81e..445a41eb0e 100644 --- a/bundle/config/resources/secret_scope.go +++ b/bundle/config/resources/secret_scope.go @@ -43,6 +43,8 @@ type SecretScope struct { BackendType workspace.ScopeBackendType `json:"backend_type,omitempty"` // The metadata for the secret scope if the type is `AZURE_KEYVAULT` KeyvaultMetadata *workspace.AzureKeyVaultSecretScopeMetadata `json:"keyvault_metadata,omitempty"` + + Lifecycle Lifecycle `json:"lifecycle"` } func (s *SecretScope) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/sql_warehouses.go b/bundle/config/resources/sql_warehouses.go index f43c09ff7a..920d4bf136 100644 --- a/bundle/config/resources/sql_warehouses.go +++ b/bundle/config/resources/sql_warehouses.go @@ -28,6 +28,7 @@ type SqlWarehouse struct { URL string `json:"url,omitempty" bundle:"internal"` sql.CreateWarehouseRequest + Lifecycle Lifecycle `json:"lifecycle"` } func (sw *SqlWarehouse) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/volume.go b/bundle/config/resources/volume.go index 4c0a09bda5..9da07005f1 100644 --- a/bundle/config/resources/volume.go +++ b/bundle/config/resources/volume.go @@ -49,6 +49,7 @@ type Volume struct { ID string `json:"id,omitempty" bundle:"readonly"` catalog.CreateVolumeRequestContent + Lifecycle Lifecycle `json:"lifecycle"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` URL string `json:"url,omitempty" bundle:"internal"` diff --git 
a/bundle/deploy/terraform/tfdyn/convert_app.go b/bundle/deploy/terraform/tfdyn/convert_app.go index b3d599f15d..b25d403766 100644 --- a/bundle/deploy/terraform/tfdyn/convert_app.go +++ b/bundle/deploy/terraform/tfdyn/convert_app.go @@ -44,6 +44,11 @@ func (appConverter) Convert(ctx context.Context, key string, vin dyn.Value, out return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. out.App[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_app_test.go b/bundle/deploy/terraform/tfdyn/convert_app_test.go index f95a6419a5..d58e9ff214 100644 --- a/bundle/deploy/terraform/tfdyn/convert_app_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_app_test.go @@ -156,3 +156,36 @@ func TestConvertAppWithNoDescription(t *testing.T) { }, }, app) } + +func TestConvertAppWithLifecycle(t *testing.T) { + src := resources.App{ + SourceCodePath: "./app", + Config: map[string]any{ + "command": []string{"python", "app.py"}, + }, + App: apps.App{ + Name: "app_id", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = appConverter{}.Convert(ctx, "my_app", vin, out) + require.NoError(t, err) + + // Assert equality on the app + assert.Equal(t, map[string]any{ + "name": "app_id", + "description": "", + "no_compute": true, + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.App["my_app"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_cluster.go b/bundle/deploy/terraform/tfdyn/convert_cluster.go index 18819c00fc..e53b22a38d 100644 --- a/bundle/deploy/terraform/tfdyn/convert_cluster.go +++ b/bundle/deploy/terraform/tfdyn/convert_cluster.go @@ -35,6 +35,11 @@ func (clusterConverter) Convert(ctx context.Context, key string, vin dyn.Value, return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. 
out.Cluster[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_cluster_test.go b/bundle/deploy/terraform/tfdyn/convert_cluster_test.go index 2e1f71f30a..df0d8b3dda 100644 --- a/bundle/deploy/terraform/tfdyn/convert_cluster_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_cluster_test.go @@ -94,3 +94,35 @@ func TestConvertCluster(t *testing.T) { }, }, out.Permissions["cluster_my_cluster"]) } + +func TestConvertClusterWithLifecycle(t *testing.T) { + src := resources.Cluster{ + ClusterSpec: compute.ClusterSpec{ + NumWorkers: 3, + SparkVersion: "13.3.x-scala2.12", + ClusterName: "cluster", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = clusterConverter{}.Convert(ctx, "my_cluster", vin, out) + require.NoError(t, err) + + // Assert equality on the cluster + assert.Equal(t, map[string]any{ + "num_workers": int64(3), + "spark_version": "13.3.x-scala2.12", + "cluster_name": "cluster", + "no_wait": true, + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.Cluster["my_cluster"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_dashboard.go b/bundle/deploy/terraform/tfdyn/convert_dashboard.go index b51f6ae445..eb9260e8ec 100644 --- a/bundle/deploy/terraform/tfdyn/convert_dashboard.go +++ b/bundle/deploy/terraform/tfdyn/convert_dashboard.go @@ -70,6 +70,11 @@ func (dashboardConverter) Convert(ctx context.Context, key string, vin dyn.Value return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. out.Dashboard[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go b/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go index 18a61e5c77..e4e1d4cf68 100644 --- a/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go @@ -112,3 +112,34 @@ func TestConvertDashboardSerializedDashboardAny(t *testing.T) { // Assert that the "file_path" is dropped. 
assert.NotContains(t, out.Dashboard["my_dashboard"], "file_path") } + +func TestConvertDashboardWithLifecycle(t *testing.T) { + src := resources.Dashboard{ + DashboardConfig: resources.DashboardConfig{ + Dashboard: dashboards.Dashboard{ + DisplayName: "my dashboard", + WarehouseId: "f00dcafe", + }, + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = dashboardConverter{}.Convert(ctx, "my_dashboard", vin, out) + require.NoError(t, err) + + // Assert equality on the dashboard + assert.Equal(t, map[string]any{ + "display_name": "my dashboard", + "warehouse_id": "f00dcafe", + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.Dashboard["my_dashboard"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_database_instance.go b/bundle/deploy/terraform/tfdyn/convert_database_instance.go index a0b4dfc768..1dfb4dc4c9 100644 --- a/bundle/deploy/terraform/tfdyn/convert_database_instance.go +++ b/bundle/deploy/terraform/tfdyn/convert_database_instance.go @@ -27,6 +27,11 @@ func (d databaseInstanceConverter) Convert(ctx context.Context, key string, vin return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + out.DatabaseInstance[key] = vout.AsAny() // Configure permissions for this resource. diff --git a/bundle/deploy/terraform/tfdyn/convert_database_instance_test.go b/bundle/deploy/terraform/tfdyn/convert_database_instance_test.go index 6671282cbf..50d1c5ae86 100644 --- a/bundle/deploy/terraform/tfdyn/convert_database_instance_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_database_instance_test.go @@ -117,3 +117,33 @@ func TestConvertDatabaseInstanceWithPermissions(t *testing.T) { }, }, out.Permissions["database_instance_db_with_permissions"]) } + +func TestConvertDatabaseInstanceWithLifecycle(t *testing.T) { + src := resources.DatabaseInstance{ + DatabaseInstance: database.DatabaseInstance{ + Name: "test-db-instance", + Capacity: "CU_4", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = databaseInstanceConverter{}.Convert(ctx, "my_database_instance", vin, out) + require.NoError(t, err) + + // Assert equality on the database instance + assert.Equal(t, map[string]any{ + "name": "test-db-instance", + "capacity": "CU_4", + "purge_on_delete": true, + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.DatabaseInstance["my_database_instance"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_experiment.go b/bundle/deploy/terraform/tfdyn/convert_experiment.go index 0c129181f2..a741b4dec4 100644 --- a/bundle/deploy/terraform/tfdyn/convert_experiment.go +++ b/bundle/deploy/terraform/tfdyn/convert_experiment.go @@ -28,6 +28,11 @@ func (experimentConverter) Convert(ctx context.Context, key string, vin dyn.Valu return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. 
out.MlflowExperiment[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_experiment_test.go b/bundle/deploy/terraform/tfdyn/convert_experiment_test.go index 44c2cd820a..8006cf94e1 100644 --- a/bundle/deploy/terraform/tfdyn/convert_experiment_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_experiment_test.go @@ -50,3 +50,30 @@ func TestConvertExperiment(t *testing.T) { }, }, out.Permissions["mlflow_experiment_my_experiment"]) } + +func TestConvertExperimentWithLifecycle(t *testing.T) { + src := resources.MlflowExperiment{ + Experiment: ml.Experiment{ + Name: "name", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = experimentConverter{}.Convert(ctx, "my_experiment", vin, out) + require.NoError(t, err) + + // Assert equality on the experiment + assert.Equal(t, map[string]any{ + "name": "name", + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.MlflowExperiment["my_experiment"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_job.go b/bundle/deploy/terraform/tfdyn/convert_job.go index e38ea36d6b..86d503b0ce 100644 --- a/bundle/deploy/terraform/tfdyn/convert_job.go +++ b/bundle/deploy/terraform/tfdyn/convert_job.go @@ -182,6 +182,11 @@ func (jobConverter) Convert(ctx context.Context, key string, vin dyn.Value, out return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. out.Job[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_job_test.go b/bundle/deploy/terraform/tfdyn/convert_job_test.go index a7c506d592..5b812a127c 100644 --- a/bundle/deploy/terraform/tfdyn/convert_job_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_job_test.go @@ -286,3 +286,30 @@ func TestConvertJobApplyPolicyDefaultValues(t *testing.T) { }, }, out.Job["my_job"]) } + +func TestConvertJobWithLifecycle(t *testing.T) { + src := resources.Job{ + JobSettings: jobs.JobSettings{ + Name: "my job", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = jobConverter{}.Convert(ctx, "my_job", vin, out) + require.NoError(t, err) + + // Assert equality on the job + assert.Equal(t, map[string]any{ + "name": "my job", + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.Job["my_job"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_model.go b/bundle/deploy/terraform/tfdyn/convert_model.go index f5d7d489b6..722f3aa636 100644 --- a/bundle/deploy/terraform/tfdyn/convert_model.go +++ b/bundle/deploy/terraform/tfdyn/convert_model.go @@ -28,6 +28,11 @@ func (modelConverter) Convert(ctx context.Context, key string, vin dyn.Value, ou return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. 
out.MlflowModel[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint.go b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint.go index b67e4dcc34..b9a5d95c14 100644 --- a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint.go +++ b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint.go @@ -28,6 +28,11 @@ func (modelServingEndpointConverter) Convert(ctx context.Context, key string, vi return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. out.ModelServing[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go index 029478a10a..f408d6305b 100644 --- a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go @@ -86,3 +86,30 @@ func TestConvertModelServingEndpoint(t *testing.T) { }, }, out.Permissions["model_serving_my_model_serving_endpoint"]) } + +func TestConvertModelServingEndpointWithLifecycle(t *testing.T) { + src := resources.ModelServingEndpoint{ + CreateServingEndpoint: serving.CreateServingEndpoint{ + Name: "name", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = modelServingEndpointConverter{}.Convert(ctx, "my_model_serving_endpoint", vin, out) + require.NoError(t, err) + + // Assert equality on the model serving endpoint + assert.Equal(t, map[string]any{ + "name": "name", + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.ModelServing["my_model_serving_endpoint"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_model_test.go b/bundle/deploy/terraform/tfdyn/convert_model_test.go index 0b36034514..a395c1cb29 100644 --- a/bundle/deploy/terraform/tfdyn/convert_model_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_model_test.go @@ -72,3 +72,30 @@ func TestConvertModel(t *testing.T) { }, }, out.Permissions["mlflow_model_my_model"]) } + +func TestConvertModelWithLifecycle(t *testing.T) { + src := resources.MlflowModel{ + CreateModelRequest: ml.CreateModelRequest{ + Name: "name", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = modelConverter{}.Convert(ctx, "my_model", vin, out) + require.NoError(t, err) + + // Assert equality on the model + assert.Equal(t, map[string]any{ + "name": "name", + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.MlflowModel["my_model"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_pipeline.go b/bundle/deploy/terraform/tfdyn/convert_pipeline.go index d2df60fa28..944ce0858c 100644 --- a/bundle/deploy/terraform/tfdyn/convert_pipeline.go +++ b/bundle/deploy/terraform/tfdyn/convert_pipeline.go @@ -43,6 +43,11 @@ func (pipelineConverter) Convert(ctx context.Context, key string, vin dyn.Value, return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. 
out.Pipeline[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go b/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go index ed6bd70a08..2fe5b0d4c4 100644 --- a/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go @@ -139,3 +139,30 @@ func TestConvertPipeline(t *testing.T) { }, }, out.Permissions["pipeline_my_pipeline"]) } + +func TestConvertPipelineWithLifecycle(t *testing.T) { + src := resources.Pipeline{ + CreatePipeline: pipelines.CreatePipeline{ + Name: "my pipeline", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = pipelineConverter{}.Convert(ctx, "my_pipeline", vin, out) + require.NoError(t, err) + + // Assert equality on the pipeline + assert.Equal(t, map[string]any{ + "name": "my pipeline", + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.Pipeline["my_pipeline"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_quality_monitor.go b/bundle/deploy/terraform/tfdyn/convert_quality_monitor.go index 341df7c220..3e78bacc70 100644 --- a/bundle/deploy/terraform/tfdyn/convert_quality_monitor.go +++ b/bundle/deploy/terraform/tfdyn/convert_quality_monitor.go @@ -26,6 +26,11 @@ func (qualityMonitorConverter) Convert(ctx context.Context, key string, vin dyn. return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. out.QualityMonitor[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go b/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go index 4e457ca3b3..8381d15ffd 100644 --- a/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go @@ -44,3 +44,32 @@ func TestConvertQualityMonitor(t *testing.T) { }, }, out.QualityMonitor["my_monitor"]) } + +func TestConvertQualityMonitorWithLifecycle(t *testing.T) { + src := resources.QualityMonitor{ + TableName: "test_table_name", + CreateMonitor: catalog.CreateMonitor{ + AssetsDir: "assets_dir", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = qualityMonitorConverter{}.Convert(ctx, "my_monitor", vin, out) + require.NoError(t, err) + + // Assert equality on the quality monitor + assert.Equal(t, map[string]any{ + "assets_dir": "assets_dir", + "table_name": "test_table_name", + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.QualityMonitor["my_monitor"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_registered_model.go b/bundle/deploy/terraform/tfdyn/convert_registered_model.go index 20aa596f2c..49e05b47e5 100644 --- a/bundle/deploy/terraform/tfdyn/convert_registered_model.go +++ b/bundle/deploy/terraform/tfdyn/convert_registered_model.go @@ -28,6 +28,11 @@ func (registeredModelConverter) Convert(ctx context.Context, key string, vin dyn return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. 
out.RegisteredModel[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go b/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go index 633ec3eee4..dba297b165 100644 --- a/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go @@ -56,3 +56,30 @@ func TestConvertRegisteredModel(t *testing.T) { }, }, out.Grants["registered_model_my_registered_model"]) } + +func TestConvertRegisteredModelWithLifecycle(t *testing.T) { + src := resources.RegisteredModel{ + CreateRegisteredModelRequest: catalog.CreateRegisteredModelRequest{ + Name: "name", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = registeredModelConverter{}.Convert(ctx, "my_registered_model", vin, out) + require.NoError(t, err) + + // Assert equality on the registered model + assert.Equal(t, map[string]any{ + "name": "name", + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.RegisteredModel["my_registered_model"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_schema.go b/bundle/deploy/terraform/tfdyn/convert_schema.go index b5e6a88c0d..33fd8aab04 100644 --- a/bundle/deploy/terraform/tfdyn/convert_schema.go +++ b/bundle/deploy/terraform/tfdyn/convert_schema.go @@ -36,6 +36,11 @@ func (schemaConverter) Convert(ctx context.Context, key string, vin dyn.Value, o return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. out.Schema[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_schema_test.go b/bundle/deploy/terraform/tfdyn/convert_schema_test.go index 0f48ff66b0..dd2997acae 100644 --- a/bundle/deploy/terraform/tfdyn/convert_schema_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_schema_test.go @@ -73,3 +73,31 @@ func TestConvertSchema(t *testing.T) { }, }, out.Grants["schema_my_schema"]) } + +func TestConvertSchemaWithLifecycle(t *testing.T) { + src := resources.Schema{ + CreateSchema: catalog.CreateSchema{ + Name: "name", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = schemaConverter{}.Convert(ctx, "my_schema", vin, out) + require.NoError(t, err) + + // Assert equality on the schema + assert.Equal(t, map[string]any{ + "name": "name", + "force_destroy": true, + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.Schema["my_schema"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_secret_scope.go b/bundle/deploy/terraform/tfdyn/convert_secret_scope.go index 97ebce7bba..880ed27ce4 100644 --- a/bundle/deploy/terraform/tfdyn/convert_secret_scope.go +++ b/bundle/deploy/terraform/tfdyn/convert_secret_scope.go @@ -54,6 +54,12 @@ func (s secretScopeConverter) Convert(ctx context.Context, key string, vin dyn.V for _, diag := range diags { log.Debugf(ctx, "secret scope normalization diagnostic: %s", diag.Summary) } + + vout, err := convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + out.SecretScope[key] = vout.AsAny() // Configure permissions for this resource diff --git a/bundle/deploy/terraform/tfdyn/convert_sql_warehouse.go 
b/bundle/deploy/terraform/tfdyn/convert_sql_warehouse.go index a4f489d553..0107ba2460 100644 --- a/bundle/deploy/terraform/tfdyn/convert_sql_warehouse.go +++ b/bundle/deploy/terraform/tfdyn/convert_sql_warehouse.go @@ -28,6 +28,11 @@ func (sqlWarehouseConverter) Convert(ctx context.Context, key string, vin dyn.Va return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. out.SqlEndpoint[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_sql_warehouse_test.go b/bundle/deploy/terraform/tfdyn/convert_sql_warehouse_test.go index 05e58121bf..a5ee21e2cd 100644 --- a/bundle/deploy/terraform/tfdyn/convert_sql_warehouse_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_sql_warehouse_test.go @@ -65,3 +65,30 @@ func TestConvertSqlWarehouse(t *testing.T) { "min_num_clusters": int64(1), }, sqlWarehouse) } + +func TestConvertSqlWarehouseWithLifecycle(t *testing.T) { + src := resources.SqlWarehouse{ + CreateWarehouseRequest: sql.CreateWarehouseRequest{ + Name: "test_sql_warehouse", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = sqlWarehouseConverter{}.Convert(ctx, "test_sql_warehouse", vin, out) + require.NoError(t, err) + + // Assert equality on the SQL warehouse + assert.Equal(t, map[string]any{ + "name": "test_sql_warehouse", + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.SqlEndpoint["test_sql_warehouse"]) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_volume.go b/bundle/deploy/terraform/tfdyn/convert_volume.go index 4211e1f9e1..287ddee0c6 100644 --- a/bundle/deploy/terraform/tfdyn/convert_volume.go +++ b/bundle/deploy/terraform/tfdyn/convert_volume.go @@ -28,6 +28,11 @@ func (volumeConverter) Convert(ctx context.Context, key string, vin dyn.Value, o return err } + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + // Add the converted resource to the output. 
out.Volume[key] = vout.AsAny() diff --git a/bundle/deploy/terraform/tfdyn/convert_volume_test.go b/bundle/deploy/terraform/tfdyn/convert_volume_test.go index 92c64212b9..9e76b0af62 100644 --- a/bundle/deploy/terraform/tfdyn/convert_volume_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_volume_test.go @@ -72,3 +72,30 @@ func TestConvertVolume(t *testing.T) { }, }, out.Grants["volume_my_volume"]) } + +func TestConvertVolumeWithLifecycle(t *testing.T) { + src := resources.Volume{ + CreateVolumeRequestContent: catalog.CreateVolumeRequestContent{ + Name: "name", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: true, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = volumeConverter{}.Convert(ctx, "my_volume", vin, out) + require.NoError(t, err) + + // Assert equality on the volume + assert.Equal(t, map[string]any{ + "name": "name", + "lifecycle": map[string]any{ + "prevent_destroy": true, + }, + }, out.Volume["my_volume"]) +} diff --git a/bundle/deploy/terraform/tfdyn/lifecycle.go b/bundle/deploy/terraform/tfdyn/lifecycle.go new file mode 100644 index 0000000000..1600cdef2d --- /dev/null +++ b/bundle/deploy/terraform/tfdyn/lifecycle.go @@ -0,0 +1,20 @@ +package tfdyn + +import ( + "context" + + "github.com/databricks/cli/libs/dyn" +) + +func convertLifecycle(ctx context.Context, vout, vLifecycle dyn.Value) (dyn.Value, error) { + if !vLifecycle.IsValid() { + return vout, nil + } + + vout, err := dyn.Set(vout, "lifecycle", vLifecycle) + if err != nil { + return dyn.InvalidValue, err + } + + return vout, nil +} From 637a0575c96e895aca243532ea3fd90d55252877 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 2 Sep 2025 12:39:33 +0200 Subject: [PATCH 02/19] added support for prevent_destroy in direct --- .../lifecycle/prevent-destroy/databricks.yml | 20 +++++++ .../prevent-destroy/out.direct-exp.txt | 37 ++++++++++++ .../prevent-destroy/out.terraform.txt | 59 +++++++++++++++++++ .../out.test.toml | 0 .../lifecycle/prevent-destroy/output.txt | 23 ++++++++ .../bundle/lifecycle/prevent-destroy/script | 18 ++++++ .../test-notebook.py | 0 .../lifecycle/prevent-destroy/test.toml | 5 ++ .../lifecycle/prevent_deploy/databricks.yml | 16 ----- .../lifecycle/prevent_deploy/output.txt | 44 -------------- .../bundle/lifecycle/prevent_deploy/script | 9 --- .../terraform/tfdyn/convert_app_test.go | 33 +++++++++++ bundle/phases/deploy.go | 4 ++ bundle/phases/destroy.go | 5 ++ bundle/phases/plan.go | 32 ++++++++++ 15 files changed, 236 insertions(+), 69 deletions(-) create mode 100644 acceptance/bundle/lifecycle/prevent-destroy/databricks.yml create mode 100644 acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt create mode 100644 acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt rename acceptance/bundle/lifecycle/{prevent_deploy => prevent-destroy}/out.test.toml (100%) create mode 100644 acceptance/bundle/lifecycle/prevent-destroy/output.txt create mode 100644 acceptance/bundle/lifecycle/prevent-destroy/script rename acceptance/bundle/lifecycle/{prevent_deploy => prevent-destroy}/test-notebook.py (100%) create mode 100644 acceptance/bundle/lifecycle/prevent-destroy/test.toml delete mode 100644 acceptance/bundle/lifecycle/prevent_deploy/databricks.yml delete mode 100644 acceptance/bundle/lifecycle/prevent_deploy/output.txt delete mode 100644 acceptance/bundle/lifecycle/prevent_deploy/script create mode 100644 bundle/phases/plan.go diff --git 
a/acceptance/bundle/lifecycle/prevent-destroy/databricks.yml b/acceptance/bundle/lifecycle/prevent-destroy/databricks.yml new file mode 100644 index 0000000000..a8eb696cf8 --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent-destroy/databricks.yml @@ -0,0 +1,20 @@ +bundle: + name: prevent-destroy + +lifecycle: &lifecycle_base + lifecycle: + prevent_destroy: true + +pipeline: &pipeline_base + resources: + pipelines: + my_pipelines: + name: "test-pipeline" + libraries: + - notebook: + path: "./test-notebook.py" + <<: *lifecycle_base + schema: test-schema + catalog: main + +<<: *pipeline_base diff --git a/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt b/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt new file mode 100644 index 0000000000..47b9844786 --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt @@ -0,0 +1,37 @@ + +>>> musterr [CLI] bundle destroy --auto-approve +Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy + + +Exit code (musterr): 1 + +>>> musterr [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... +Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy + + +Exit code (musterr): 1 + +>>> [CLI] bundle deploy --auto-approve +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... + +This action will result in the deletion or recreation of the following Lakeflow Declarative Pipelines along with the +Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the pipelines will +restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline +properties such as the 'catalog' or 'storage' are changed: + recreate pipeline my_pipelines +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> [CLI] bundle deploy --auto-approve +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... + +This action will result in the deletion or recreation of the following Lakeflow Declarative Pipelines along with the +Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the pipelines will +restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline +properties such as the 'catalog' or 'storage' are changed: + delete pipeline my_pipelines +Deploying resources... +Updating deployment state... +Deployment complete! diff --git a/acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt b/acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt new file mode 100644 index 0000000000..d67c3f84e7 --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt @@ -0,0 +1,59 @@ + +>>> musterr [CLI] bundle destroy --auto-approve +Error: exit status 1 + +Error: Instance cannot be destroyed + + on bundle.tf.json line 15, in resource.databricks_pipeline: + 15: "my_pipelines": { + +Resource databricks_pipeline.my_pipelines has lifecycle.prevent_destroy set, +but the plan calls for this resource to be destroyed. To avoid this error and +continue with the plan, either disable lifecycle.prevent_destroy or reduce +the scope of the plan using the -target flag. 
+ + + +Exit code (musterr): 1 + +>>> musterr [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... +Error: exit status 1 + +Error: Instance cannot be destroyed + + on bundle.tf.json line 15, in resource.databricks_pipeline: + 15: "my_pipelines": { + +Resource databricks_pipeline.my_pipelines has lifecycle.prevent_destroy set, +but the plan calls for this resource to be destroyed. To avoid this error and +continue with the plan, either disable lifecycle.prevent_destroy or reduce +the scope of the plan using the -target flag. + + + +Exit code (musterr): 1 + +>>> [CLI] bundle deploy --auto-approve +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... + +This action will result in the deletion or recreation of the following Lakeflow Declarative Pipelines along with the +Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the pipelines will +restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline +properties such as the 'catalog' or 'storage' are changed: + recreate pipeline my_pipelines +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> [CLI] bundle deploy --auto-approve +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... + +This action will result in the deletion or recreation of the following Lakeflow Declarative Pipelines along with the +Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the pipelines will +restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline +properties such as the 'catalog' or 'storage' are changed: + delete pipeline my_pipelines +Deploying resources... +Updating deployment state... +Deployment complete! diff --git a/acceptance/bundle/lifecycle/prevent_deploy/out.test.toml b/acceptance/bundle/lifecycle/prevent-destroy/out.test.toml similarity index 100% rename from acceptance/bundle/lifecycle/prevent_deploy/out.test.toml rename to acceptance/bundle/lifecycle/prevent-destroy/out.test.toml diff --git a/acceptance/bundle/lifecycle/prevent-destroy/output.txt b/acceptance/bundle/lifecycle/prevent-destroy/output.txt new file mode 100644 index 0000000000..ac982f8aa7 --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent-destroy/output.txt @@ -0,0 +1,23 @@ + +>>> [CLI] bundle validate +Name: prevent-destroy +Target: default +Workspace: + User: [USERNAME] + Path: /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default + +Validation OK! + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
+ +>>> update_file.py databricks.yml catalog: main catalog: mainnew + +>>> update_file.py databricks.yml prevent_destroy: true prevent_destroy: false + +>>> update_file.py databricks.yml prevent_destroy: false prevent_destroy: true + +>>> update_file.py databricks.yml <<: *pipeline_base diff --git a/acceptance/bundle/lifecycle/prevent-destroy/script b/acceptance/bundle/lifecycle/prevent-destroy/script new file mode 100644 index 0000000000..68e795f221 --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent-destroy/script @@ -0,0 +1,18 @@ +trace $CLI bundle validate + +trace $CLI bundle deploy + +trace musterr $CLI bundle destroy --auto-approve >out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 + +# Changing the catalog name, deploy must fail because pipeline will be recreated +trace update_file.py databricks.yml 'catalog: main' 'catalog: mainnew' +trace musterr $CLI bundle deploy >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 + +# Removing the prevent_destroy, deploy must succeed +trace update_file.py databricks.yml 'prevent_destroy: true' 'prevent_destroy: false' +trace $CLI bundle deploy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 + +trace update_file.py databricks.yml 'prevent_destroy: false' 'prevent_destroy: true' +# Removing the pipeline, deploy must succeed +trace update_file.py databricks.yml '<<: *pipeline_base' '' +trace $CLI bundle deploy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 diff --git a/acceptance/bundle/lifecycle/prevent_deploy/test-notebook.py b/acceptance/bundle/lifecycle/prevent-destroy/test-notebook.py similarity index 100% rename from acceptance/bundle/lifecycle/prevent_deploy/test-notebook.py rename to acceptance/bundle/lifecycle/prevent-destroy/test-notebook.py diff --git a/acceptance/bundle/lifecycle/prevent-destroy/test.toml b/acceptance/bundle/lifecycle/prevent-destroy/test.toml new file mode 100644 index 0000000000..71228fe351 --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent-destroy/test.toml @@ -0,0 +1,5 @@ +EnvVaryOutput = "DATABRICKS_CLI_DEPLOYMENT" + +Ignore = [ + ".databricks" +] diff --git a/acceptance/bundle/lifecycle/prevent_deploy/databricks.yml b/acceptance/bundle/lifecycle/prevent_deploy/databricks.yml deleted file mode 100644 index 1e5f1a97fe..0000000000 --- a/acceptance/bundle/lifecycle/prevent_deploy/databricks.yml +++ /dev/null @@ -1,16 +0,0 @@ -bundle: - name: prevent-deploy - -job: &job_base - resources: - jobs: - my_job: - name: "test-job" - tasks: - - task_key: "test-task" - notebook_task: - notebook_path: "./test-notebook.py" - lifecycle: - prevent_destroy: true - -<<: *job_base diff --git a/acceptance/bundle/lifecycle/prevent_deploy/output.txt b/acceptance/bundle/lifecycle/prevent_deploy/output.txt deleted file mode 100644 index aac3177b20..0000000000 --- a/acceptance/bundle/lifecycle/prevent_deploy/output.txt +++ /dev/null @@ -1,44 +0,0 @@ - ->>> [CLI] bundle validate -Name: prevent-deploy -Target: default -Workspace: - User: [USERNAME] - Path: /Workspace/Users/[USERNAME]/.bundle/prevent-deploy/default - -Validation OK! - ->>> [CLI] bundle deploy -Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-deploy/default/files... -Deploying resources... -Updating deployment state... -Deployment complete! - ->>> musterr [CLI] bundle destroy --auto-approve -Error: exit status 1 - -Error: Instance cannot be destroyed - - on bundle.tf.json line 15, in resource.databricks_job: - 15: "my_job": { - -Resource databricks_job.my_job has lifecycle.prevent_destroy set, but the -plan calls for this resource to be destroyed.
To avoid this error and -continue with the plan, either disable lifecycle.prevent_destroy or reduce -the scope of the plan using the -target flag. - - - -Exit code (musterr): 1 - ->>> update_file.py databricks.yml <<: *job_base - ->>> musterr [CLI] bundle deploy -Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-deploy/default/files... -Deploying resources... -Updating deployment state... -Deployment complete! - -Unexpected success - -Exit code: 1 diff --git a/acceptance/bundle/lifecycle/prevent_deploy/script b/acceptance/bundle/lifecycle/prevent_deploy/script deleted file mode 100644 index 0897fe3a69..0000000000 --- a/acceptance/bundle/lifecycle/prevent_deploy/script +++ /dev/null @@ -1,9 +0,0 @@ -trace $CLI bundle validate - -trace $CLI bundle deploy - -trace musterr $CLI bundle destroy --auto-approve - -# Removing the job key forces DABs to destroy the job -trace update_file.py databricks.yml '<<: *job_base' '' -trace musterr $CLI bundle deploy diff --git a/bundle/deploy/terraform/tfdyn/convert_app_test.go b/bundle/deploy/terraform/tfdyn/convert_app_test.go index d58e9ff214..810f3fb84e 100644 --- a/bundle/deploy/terraform/tfdyn/convert_app_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_app_test.go @@ -189,3 +189,36 @@ func TestConvertAppWithLifecycle(t *testing.T) { }, }, out.App["my_app"]) } + +func TestConvertAppWithLifecycleFalse(t *testing.T) { + src := resources.App{ + SourceCodePath: "./app", + Config: map[string]any{ + "command": []string{"python", "app.py"}, + }, + App: apps.App{ + Name: "app_id", + }, + Lifecycle: resources.Lifecycle{ + PreventDestroy: false, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = appConverter{}.Convert(ctx, "my_app", vin, out) + require.NoError(t, err) + + // Assert equality on the app + assert.Equal(t, map[string]any{ + "name": "app_id", + "description": "", + "no_compute": true, + "lifecycle": map[string]any{ + "prevent_destroy": false, + }, + }, out.App["my_app"]) +} diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 1f6043f457..6c1fb3ef61 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -50,6 +50,10 @@ func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) { if err != nil { return false, err } + err = checkForPreventDestroy(b, actions, false) + if err != nil { + return false, err + } b.Plan.Actions = actions types := []deployplan.ActionType{deployplan.ActionTypeRecreate, deployplan.ActionTypeDelete} diff --git a/bundle/phases/destroy.go b/bundle/phases/destroy.go index 51b78c42ad..0dc55ad4b7 100644 --- a/bundle/phases/destroy.go +++ b/bundle/phases/destroy.go @@ -58,6 +58,11 @@ func approvalForDestroy(ctx context.Context, b *bundle.Bundle) (bool, error) { return false, err } + err = checkForPreventDestroy(b, deleteActions, true) + if err != nil { + return false, err + } + b.Plan.Actions = deleteActions if len(deleteActions) > 0 { diff --git a/bundle/phases/plan.go b/bundle/phases/plan.go new file mode 100644 index 0000000000..9ab1a4f869 --- /dev/null +++ b/bundle/phases/plan.go @@ -0,0 +1,32 @@ +package phases + +import ( + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deployplan" + "github.com/databricks/cli/libs/dyn" +) + +// checkForPreventDestroy checks if the resource has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. +// If it does, it returns an error.
+func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action, isDestroy bool) error { + root := b.Config.Value() + for _, action := range actions { + if action.ActionType == deployplan.ActionTypeRecreate || (isDestroy && action.ActionType == deployplan.ActionTypeDelete) { + path := dyn.NewPath(dyn.Key("resources"), dyn.Key(action.Group), dyn.Key(action.Name), dyn.Key("lifecycle")) + lifecycleV, err := dyn.GetByPath(root, path) + if err != nil { + return err + } + if lifecycleV.Kind() == dyn.KindMap { + preventDestroyV := lifecycleV.Get("prevent_destroy") + preventDestroy, ok := preventDestroyV.AsBool() + if ok && preventDestroy { + return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy", action.Name) + } + } + } + } + return nil +} From fd67d51ff4ae9a821f22c784160c40bdc647f96b Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 2 Sep 2025 13:26:52 +0200 Subject: [PATCH 03/19] fixes --- .../bundle/lifecycle/prevent-destroy/output.txt | 8 -------- acceptance/bundle/lifecycle/prevent-destroy/script | 8 ++++---- bundle/phases/deploy.go | 1 - bundle/phases/destroy.go | 2 -- bundle/phases/plan.go | 11 ++++++----- 5 files changed, 10 insertions(+), 20 deletions(-) diff --git a/acceptance/bundle/lifecycle/prevent-destroy/output.txt b/acceptance/bundle/lifecycle/prevent-destroy/output.txt index ac982f8aa7..13b0ba466b 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/output.txt +++ b/acceptance/bundle/lifecycle/prevent-destroy/output.txt @@ -13,11 +13,3 @@ Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/de Deploying resources... Updating deployment state... Deployment complete! 
- ->>> update_file.py databricks.yml catalog: main catalog: mainnew - ->>> update_file.py databricks.yml prevent_destroy: true prevent_destroy: false - ->>> update_file.py databricks.yml prevent_destroy: false prevent_destroy: true - ->>> update_file.py databricks.yml <<: *pipeline_base diff --git a/acceptance/bundle/lifecycle/prevent-destroy/script b/acceptance/bundle/lifecycle/prevent-destroy/script index 68e795f221..6e02c58b6f 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/script +++ b/acceptance/bundle/lifecycle/prevent-destroy/script @@ -5,14 +5,14 @@ trace $CLI bundle deploy trace musterr $CLI bundle destroy --auto-approve >out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 # Changing the catalog name, deploy must fail because pipeline will be recreated -trace update_file.py databricks.yml 'catalog: main' 'catalog: mainnew' +update_file.py databricks.yml 'catalog: main' 'catalog: mainnew' trace musterr $CLI bundle deploy >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 # Removing the prevent_destroy, deploy must succeed -trace update_file.py databricks.yml 'prevent_destroy: true' 'prevent_destroy: false' +update_file.py databricks.yml 'prevent_destroy: true' 'prevent_destroy: false' trace $CLI bundle deploy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 -trace update_file.py databricks.yml 'prevent_destroy: false' 'prevent_destroy: true' +update_file.py databricks.yml 'prevent_destroy: false' 'prevent_destroy: true' # Removing the pipeline, deploy must succeed -trace update_file.py databricks.yml '<<: *pipeline_base' '' +update_file.py databricks.yml '<<: *pipeline_base' '' trace $CLI bundle deploy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 40e07741f6..6e49e3a25b 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -56,7 +56,6 @@ func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) { if err != nil { return false, err } - b.Plan.Actions = actions types := []deployplan.ActionType{deployplan.ActionTypeRecreate, deployplan.ActionTypeDelete} schemaActions := deployplan.FilterGroup(actions, "schemas", types...)
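The check wired into approvalForDeploy and approvalForDestroy boils down to a dyn-tree lookup per planned action. The snippet below reproduces that lookup in isolation; it is a minimal sketch, not part of the patch, and assumes the dyn.V value constructor from libs/dyn (the path and accessor helpers are exactly the ones checkForPreventDestroy uses):

    package main

    import (
            "fmt"

            "github.com/databricks/cli/libs/dyn"
    )

    func main() {
            // Hand-built stand-in for b.Config.Value().
            root := dyn.V(map[string]dyn.Value{
                    "resources": dyn.V(map[string]dyn.Value{
                            "pipelines": dyn.V(map[string]dyn.Value{
                                    "my_pipelines": dyn.V(map[string]dyn.Value{
                                            "lifecycle": dyn.V(map[string]dyn.Value{
                                                    "prevent_destroy": dyn.V(true),
                                            }),
                                    }),
                            }),
                    }),
            })

            // Same path construction as in plan.go, with the action's
            // group and key filled in by hand.
            path := dyn.NewPath(dyn.Key("resources"), dyn.Key("pipelines"), dyn.Key("my_pipelines"), dyn.Key("lifecycle"))
            lifecycleV, err := dyn.GetByPath(root, path)
            if err != nil {
                    fmt.Println("no lifecycle block; nothing to enforce")
                    return
            }
            if lifecycleV.Kind() == dyn.KindMap {
                    if prevent, ok := lifecycleV.Get("prevent_destroy").AsBool(); ok && prevent {
                            fmt.Println("recreate/destroy of my_pipelines would be rejected")
                    }
            }
    }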
diff --git a/bundle/phases/destroy.go b/bundle/phases/destroy.go index 704c1e0f22..059df3dddf 100644 --- a/bundle/phases/destroy.go +++ b/bundle/phases/destroy.go @@ -70,8 +70,6 @@ func approvalForDestroy(ctx context.Context, b *bundle.Bundle) (bool, error) { return false, err } - b.Plan.Actions = deleteActions - if len(deleteActions) > 0 { cmdio.LogString(ctx, "The following resources will be deleted:") for _, a := range deleteActions { diff --git a/bundle/phases/plan.go b/bundle/phases/plan.go index c625ff6b55..b6a1ac908e 100644 --- a/bundle/phases/plan.go +++ b/bundle/phases/plan.go @@ -2,16 +2,17 @@ package phases import ( "context" - "fmt" + "fmt" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/deploy" - "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/bundle/deploy/terraform" + "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/bundle/statemgmt" "github.com/databricks/cli/bundle/trampoline" + "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/logdiag" ) @@ -48,14 +49,14 @@ func deployPrepare(ctx context.Context, b *bundle.Bundle) map[string][]libraries return libs } - + // checkForPreventDestroy checks if the resource has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. // If it does, it returns an error. func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action, isDestroy bool) error { root := b.Config.Value() for _, action := range actions { if action.ActionType == deployplan.ActionTypeRecreate || (isDestroy && action.ActionType == deployplan.ActionTypeDelete) { - path := dyn.NewPath(dyn.Key("resources"), dyn.Key(action.Group), dyn.Key(action.Name), dyn.Key("lifecycle")) + path := dyn.NewPath(dyn.Key("resources"), dyn.Key(action.Group), dyn.Key(action.Key), dyn.Key("lifecycle")) lifecycleV, err := dyn.GetByPath(root, path) if err != nil { return err @@ -64,7 +65,7 @@ func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action, isDes preventDestroyV := lifecycleV.Get("prevent_destroy") preventDestroy, ok := preventDestroyV.AsBool() if ok && preventDestroy { - return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy", action.Name) + return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. 
To avoid this error, disable lifecycle.prevent_destroy", action.Key) } } } From cc79894ee78b61e434e774acba0dc6237d20479c Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 2 Sep 2025 14:56:23 +0200 Subject: [PATCH 04/19] fixes --- bundle/phases/plan.go | 4 +- bundle/schema/jsonschema.json | 93 ++++++++++++++++++++++++++++++++++- 2 files changed, 94 insertions(+), 3 deletions(-) diff --git a/bundle/phases/plan.go b/bundle/phases/plan.go index b6a1ac908e..5931cd8fc9 100644 --- a/bundle/phases/plan.go +++ b/bundle/phases/plan.go @@ -58,9 +58,11 @@ func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action, isDes if action.ActionType == deployplan.ActionTypeRecreate || (isDestroy && action.ActionType == deployplan.ActionTypeDelete) { path := dyn.NewPath(dyn.Key("resources"), dyn.Key(action.Group), dyn.Key(action.Key), dyn.Key("lifecycle")) lifecycleV, err := dyn.GetByPath(root, path) + // If there is no lifecycle, skip if err != nil { - return err + continue } + if lifecycleV.Kind() == dyn.KindMap { preventDestroyV := lifecycleV.Get("prevent_destroy") preventDestroy, ok := preventDestroyV.AsBool() diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 9c32f2dba7..ae69be9b57 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -107,6 +107,9 @@ "description": "The unique identifier of the app.", "$ref": "#/$defs/string" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "name": { "description": "The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.\nIt must be unique within the workspace.", "$ref": "#/$defs/string" }, @@ -159,6 +162,7 @@ "additionalProperties": false, "required": [ "source_code_path", + "lifecycle", "name" ] }, @@ -291,6 +295,9 @@ "kind": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Kind" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "node_type_id": { "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.", "$ref": "#/$defs/string" }, @@ -346,6 +353,9 @@ } }, "additionalProperties": false, + "required": [ + "lifecycle" + ], "markdownDescription": "The cluster resource defines an [all-purpose cluster](https://docs.databricks.com/api/workspace/clusters/create)." }, { @@ -426,6 +436,9 @@ "file_path": { "$ref": "#/$defs/string" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "lifecycle_state": { "description": "The state of the dashboard resource. Used for tracking trashed status.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/dashboards.LifecycleState" }, @@ -455,6 +468,9 @@ } }, "additionalProperties": false, + "required": [ + "lifecycle" + ], "markdownDescription": "The dashboard resource allows you to manage [AI/BI dashboards](https://docs.databricks.com/api/workspace/lakeview/create) in a bundle. For information about AI/BI dashboards, see [link](https://docs.databricks.com/dashboards/index.html)."
}, { @@ -576,6 +592,9 @@ "enable_readable_secondaries": { "$ref": "#/$defs/bool" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "name": { "$ref": "#/$defs/string" }, @@ -612,6 +631,7 @@ }, "additionalProperties": false, "required": [ + "lifecycle", "name" ] }, @@ -728,6 +748,9 @@ "description": "A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobCluster" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "max_concurrent_runs": { "description": "An optional maximum allowed number of concurrent runs of the job.\nSet this value if you want to be able to execute multiple runs of the same job concurrently.\nThis is useful for example if you trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each other, or if you want to trigger multiple runs which differ by their input parameters.\nThis setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs.\nHowever, from then on, new runs are skipped unless there are fewer than 3 active runs.\nThis value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.", "$ref": "#/$defs/int" @@ -790,6 +813,9 @@ } }, "additionalProperties": false, + "required": [ + "lifecycle" + ], "markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a Databricks Asset Bundles template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)." }, { @@ -844,6 +870,23 @@ } ] }, + "resources.Lifecycle": { + "oneOf": [ + { + "type": "object", + "properties": { + "prevent_destroy": { + "$ref": "#/$defs/bool" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.MlflowExperiment": { "oneOf": [ { @@ -866,6 +909,9 @@ "description": "Last update time", "$ref": "#/$defs/int64" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "lifecycle_stage": { "description": "Current life cycle stage of the experiment: \"active\" or \"deleted\".\nDeleted experiments are not returned by APIs.", "$ref": "#/$defs/string" @@ -883,6 +929,9 @@ } }, "additionalProperties": false, + "required": [ + "lifecycle" + ], "markdownDescription": "The experiment resource allows you to define [MLflow experiments](https://docs.databricks.com/api/workspace/experiments/createexperiment) in a bundle. For information about MLflow experiments, see [link](https://docs.databricks.com/mlflow/experiments.html)." 
}, { @@ -945,6 +994,9 @@ "description": "Optional description for registered model.", "$ref": "#/$defs/string" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "name": { "description": "Register models under this name", "$ref": "#/$defs/string" @@ -959,6 +1011,7 @@ }, "additionalProperties": false, "required": [ + "lifecycle", "name" ], "markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use Unity Catalog [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead." @@ -1040,6 +1093,9 @@ "description": "Email notification settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EmailNotifications" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "name": { "description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.", "$ref": "#/$defs/string" @@ -1064,6 +1120,7 @@ }, "additionalProperties": false, "required": [ + "lifecycle", "name" ], "markdownDescription": "The model_serving_endpoint resource allows you to define [model serving endpoints](https://docs.databricks.com/api/workspace/servingendpoints/create). See [link](https://docs.databricks.com/machine-learning/model-serving/manage-serving-endpoints.html)." @@ -1225,6 +1282,9 @@ "description": "Libraries or code needed by this deployment.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "name": { "description": "Friendly identifier for this pipeline.", "$ref": "#/$defs/string" @@ -1285,6 +1345,9 @@ } }, "additionalProperties": false, + "required": [ + "lifecycle" + ], "markdownDescription": "The pipeline resource allows you to create Delta Live Tables [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/dlt/index.html). For a tutorial that uses the Databricks Asset Bundles template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)." 
}, { @@ -1369,6 +1432,9 @@ "description": "[Create:ERR Update:IGN] The latest error message for a monitor failure.", "$ref": "#/$defs/string" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "notifications": { "description": "[Create:OPT Update:OPT] Field for specifying notification settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications" @@ -1408,6 +1474,7 @@ "additionalProperties": false, "required": [ "table_name", + "lifecycle", "assets_dir", "output_schema_name" ], @@ -1435,6 +1502,9 @@ "grants": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Grant" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "name": { "description": "The name of the registered model", "$ref": "#/$defs/string" @@ -1450,6 +1520,7 @@ }, "additionalProperties": false, "required": [ + "lifecycle", "catalog_name", "name", "schema_name" @@ -1478,6 +1549,9 @@ "grants": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SchemaGrant" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "name": { "description": "Name of schema, relative to parent catalog.", "$ref": "#/$defs/string" @@ -1492,6 +1566,7 @@ }, "additionalProperties": false, "required": [ + "lifecycle", "catalog_name", "name" ], @@ -1566,6 +1641,9 @@ "description": "The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "name": { "description": "Scope name requested by the user. 
Scope names are unique.", "$ref": "#/$defs/string" @@ -1577,7 +1655,8 @@ }, "additionalProperties": false, "required": [ - "name" + "name", + "lifecycle" ] }, { @@ -1670,6 +1749,9 @@ "deprecationMessage": "This field is deprecated", "deprecated": true }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "max_num_clusters": { "description": "Maximum number of clusters that the autoscaler will create to handle concurrent queries.\n\nSupported values:\n - Must be \u003e= min_num_clusters\n - Must be \u003c= 30.\n\nDefaults to min_clusters if unset.", "$ref": "#/$defs/int" @@ -1696,7 +1778,10 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.CreateWarehouseRequestWarehouseType" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ + "lifecycle" + ] }, { "type": "string", @@ -1807,6 +1892,9 @@ "grants": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.VolumeGrant" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "name": { "description": "The name of the volume", "$ref": "#/$defs/string" @@ -1825,6 +1913,7 @@ }, "additionalProperties": false, "required": [ + "lifecycle", "catalog_name", "name", "schema_name" From 9dc81cedef8910277f3e8984f88f31d8adda3207 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 2 Sep 2025 15:04:50 +0200 Subject: [PATCH 05/19] removed unnecessary test --- .../terraform/tfdyn/convert_app_test.go | 33 ------------------- 1 file changed, 33 deletions(-) diff --git a/bundle/deploy/terraform/tfdyn/convert_app_test.go b/bundle/deploy/terraform/tfdyn/convert_app_test.go index 810f3fb84e..d58e9ff214 100644 --- a/bundle/deploy/terraform/tfdyn/convert_app_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_app_test.go @@ -189,36 +189,3 @@ func TestConvertAppWithLifecycle(t *testing.T) { }, }, out.App["my_app"]) } - -func TestConvertAppWithLifecycleFalse(t *testing.T) { - src := resources.App{ - SourceCodePath: "./app", - Config: map[string]any{ - "command": []string{"python", "app.py"}, - }, - App: apps.App{ - Name: "app_id", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: false, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = appConverter{}.Convert(ctx, "my_app", vin, out) - require.NoError(t, err) - - // Assert equality on the app - assert.Equal(t, map[string]any{ - "name": "app_id", - "description": "", - "no_compute": true, - "lifecycle": map[string]any{ - "prevent_destroy": false, - }, - }, out.App["my_app"]) -} From 04b059d87b6cf4481215779045666619649e521f Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 2 Sep 2025 16:48:10 +0200 Subject: [PATCH 06/19] update schema --- bundle/internal/schema/annotations.yml | 63 ++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 1d9d4e48a9..3d698b0a35 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -439,6 +439,10 @@ github.com/databricks/cli/bundle/config.Workspace: "state_path": "description": |- The workspace state path +github.com/databricks/cli/bundle/config/resources.App: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.AppPermission: "group_name": "description": |- @@ -452,6 +456,10 @@ 
github.com/databricks/cli/bundle/config/resources.AppPermission: "user_name": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.Cluster: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.ClusterPermission: "group_name": "description": |- @@ -465,6 +473,10 @@ github.com/databricks/cli/bundle/config/resources.ClusterPermission: "user_name": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.Dashboard: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.DashboardPermission: "group_name": "description": |- @@ -478,6 +490,10 @@ github.com/databricks/cli/bundle/config/resources.DashboardPermission: "user_name": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.DatabaseInstance: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermission: "group_name": "description": |- @@ -498,6 +514,10 @@ github.com/databricks/cli/bundle/config/resources.Grant: "privileges": "description": |- The privileges to grant to the specified entity +github.com/databricks/cli/bundle/config/resources.Job: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.JobPermission: "group_name": "description": |- @@ -511,6 +531,14 @@ github.com/databricks/cli/bundle/config/resources.JobPermission: "user_name": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.Lifecycle: + "prevent_destroy": + "description": |- + PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.MlflowExperiment: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission: "group_name": "description": |- @@ -524,6 +552,10 @@ github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission: "user_name": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.MlflowModel: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.MlflowModelPermission: "group_name": "description": |- @@ -537,6 +569,10 @@ github.com/databricks/cli/bundle/config/resources.MlflowModelPermission: "user_name": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermission: "group_name": "description": |- @@ -568,6 +604,10 @@ github.com/databricks/cli/bundle/config/resources.Permission: "user_name": "description": |- The name of the user that has the permission set in level. 
+github.com/databricks/cli/bundle/config/resources.Pipeline: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.PipelinePermission: "group_name": "description": |- @@ -581,6 +621,18 @@ github.com/databricks/cli/bundle/config/resources.PipelinePermission: "user_name": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.QualityMonitor: + "lifecycle": + "description": |- + PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.RegisteredModel: + "lifecycle": + "description": |- + PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.Schema: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.SchemaGrant: "principal": "description": |- @@ -595,6 +647,9 @@ github.com/databricks/cli/bundle/config/resources.SecretScope: "keyvault_metadata": "description": |- The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT` + "lifecycle": + "description": |- + PLACEHOLDER "name": "description": |- Scope name requested by the user. Scope names are unique. @@ -614,6 +669,10 @@ github.com/databricks/cli/bundle/config/resources.SecretScopePermission: "user_name": "description": |- The name of the user that has the permission set in level. This field translates to a `principal` field in secret scope ACL. +github.com/databricks/cli/bundle/config/resources.SqlWarehouse: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.SqlWarehousePermission: "group_name": "description": |- @@ -652,6 +711,10 @@ github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable: "unity_catalog_provisioning_state": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.Volume: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.VolumeGrant: "principal": "description": |- From 7e5b1fbba41f15b69afbbcde7b62604700044c2d Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 2 Sep 2025 16:53:39 +0200 Subject: [PATCH 07/19] omitempty --- bundle/config/resources/apps.go | 2 +- bundle/config/resources/clusters.go | 2 +- bundle/config/resources/dashboard.go | 2 +- bundle/config/resources/database_instance.go | 2 +- bundle/config/resources/job.go | 2 +- bundle/config/resources/mlflow_experiment.go | 2 +- bundle/config/resources/mlflow_model.go | 2 +- bundle/config/resources/model_serving_endpoint.go | 2 +- bundle/config/resources/pipeline.go | 2 +- bundle/config/resources/quality_monitor.go | 2 +- bundle/config/resources/registered_model.go | 2 +- bundle/config/resources/schema.go | 2 +- bundle/config/resources/secret_scope.go | 2 +- bundle/config/resources/sql_warehouses.go | 2 +- bundle/config/resources/volume.go | 2 +- libs/structwalk/walktype_test.go | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/bundle/config/resources/apps.go b/bundle/config/resources/apps.go index 688116375d..a28b66ae5a 100644 --- a/bundle/config/resources/apps.go +++ b/bundle/config/resources/apps.go @@ -42,7 +42,7 @@ type App struct { URL string `json:"url,omitempty" bundle:"internal"` apps.App - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (a *App) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/clusters.go b/bundle/config/resources/clusters.go index 36403c6bef..af2d66eb6b 100644 --- a/bundle/config/resources/clusters.go +++ b/bundle/config/resources/clusters.go @@ -29,7 +29,7 @@ type 
Cluster struct { URL string `json:"url,omitempty" bundle:"internal"` compute.ClusterSpec - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *Cluster) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/dashboard.go b/bundle/config/resources/dashboard.go index 822ceb9cb2..b58a216e80 100644 --- a/bundle/config/resources/dashboard.go +++ b/bundle/config/resources/dashboard.go @@ -51,7 +51,7 @@ type Dashboard struct { URL string `json:"url,omitempty" bundle:"internal"` DashboardConfig - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` // FilePath points to the local `.lvdash.json` file containing the dashboard definition. // This is inlined into serialized_dashboard during deployment. The file_path is kept around diff --git a/bundle/config/resources/database_instance.go b/bundle/config/resources/database_instance.go index 977ca514d9..5eebe0450e 100644 --- a/bundle/config/resources/database_instance.go +++ b/bundle/config/resources/database_instance.go @@ -29,7 +29,7 @@ type DatabaseInstance struct { ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` database.DatabaseInstance - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (d *DatabaseInstance) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) { diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index 5d2bc29b9b..198a0c7b41 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -30,7 +30,7 @@ type Job struct { URL string `json:"url,omitempty" bundle:"internal"` jobs.JobSettings - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (j *Job) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/mlflow_experiment.go b/bundle/config/resources/mlflow_experiment.go index aedfebc34e..e7053d54d4 100644 --- a/bundle/config/resources/mlflow_experiment.go +++ b/bundle/config/resources/mlflow_experiment.go @@ -29,7 +29,7 @@ type MlflowExperiment struct { URL string `json:"url,omitempty" bundle:"internal"` ml.Experiment - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *MlflowExperiment) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/mlflow_model.go b/bundle/config/resources/mlflow_model.go index 40ca73f118..b887857ead 100644 --- a/bundle/config/resources/mlflow_model.go +++ b/bundle/config/resources/mlflow_model.go @@ -29,7 +29,7 @@ type MlflowModel struct { URL string `json:"url,omitempty" bundle:"internal"` ml.CreateModelRequest - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *MlflowModel) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/model_serving_endpoint.go b/bundle/config/resources/model_serving_endpoint.go index 57f4f33aa8..48d11a7fa2 100644 --- a/bundle/config/resources/model_serving_endpoint.go +++ b/bundle/config/resources/model_serving_endpoint.go @@ -26,7 +26,7 @@ type ModelServingEndpoint struct { // This represents the input args for terraform, and will get converted // to a HCL representation for CRUD serving.CreateServingEndpoint - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` // This represents the id (ie serving_endpoint_id) that can be used // as a reference in other resources. 
This value is returned by terraform. diff --git a/bundle/config/resources/pipeline.go b/bundle/config/resources/pipeline.go index 954ddb6877..6b58cb094e 100644 --- a/bundle/config/resources/pipeline.go +++ b/bundle/config/resources/pipeline.go @@ -29,7 +29,7 @@ type Pipeline struct { URL string `json:"url,omitempty" bundle:"internal"` pipelines.CreatePipeline - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (p *Pipeline) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/quality_monitor.go b/bundle/config/resources/quality_monitor.go index 9e46055cae..725b935c83 100644 --- a/bundle/config/resources/quality_monitor.go +++ b/bundle/config/resources/quality_monitor.go @@ -21,7 +21,7 @@ type QualityMonitor struct { // This struct defines the creation payload for a monitor. catalog.CreateMonitor - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *QualityMonitor) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/registered_model.go b/bundle/config/resources/registered_model.go index bd34d3958f..55501e89b9 100644 --- a/bundle/config/resources/registered_model.go +++ b/bundle/config/resources/registered_model.go @@ -24,7 +24,7 @@ type RegisteredModel struct { // This represents the input args for terraform, and will get converted // to a HCL representation for CRUD catalog.CreateRegisteredModelRequest - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` URL string `json:"url,omitempty" bundle:"internal"` diff --git a/bundle/config/resources/schema.go b/bundle/config/resources/schema.go index fc8bbac592..12e100334d 100644 --- a/bundle/config/resources/schema.go +++ b/bundle/config/resources/schema.go @@ -68,7 +68,7 @@ type Schema struct { ID string `json:"id,omitempty" bundle:"readonly"` catalog.CreateSchema - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` URL string `json:"url,omitempty" bundle:"internal"` diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go index 445a41eb0e..de8f90a2bc 100644 --- a/bundle/config/resources/secret_scope.go +++ b/bundle/config/resources/secret_scope.go @@ -44,7 +44,7 @@ type SecretScope struct { // The metadata for the secret scope if the type is `AZURE_KEYVAULT` KeyvaultMetadata *workspace.AzureKeyVaultSecretScopeMetadata `json:"keyvault_metadata,omitempty"` - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *SecretScope) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/sql_warehouses.go b/bundle/config/resources/sql_warehouses.go index 920d4bf136..c028a25724 100644 --- a/bundle/config/resources/sql_warehouses.go +++ b/bundle/config/resources/sql_warehouses.go @@ -28,7 +28,7 @@ type SqlWarehouse struct { URL string `json:"url,omitempty" bundle:"internal"` sql.CreateWarehouseRequest - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (sw *SqlWarehouse) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/volume.go b/bundle/config/resources/volume.go index 9da07005f1..33bb06c366 100644 --- a/bundle/config/resources/volume.go +++ b/bundle/config/resources/volume.go @@ -49,7 +49,7 @@ type Volume struct { ID string 
`json:"id,omitempty" bundle:"readonly"` catalog.CreateVolumeRequestContent - Lifecycle Lifecycle `json:"lifecycle"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` URL string `json:"url,omitempty" bundle:"internal"` diff --git a/libs/structwalk/walktype_test.go b/libs/structwalk/walktype_test.go index c84b5b810f..e0fb2faebc 100644 --- a/libs/structwalk/walktype_test.go +++ b/libs/structwalk/walktype_test.go @@ -123,7 +123,7 @@ func TestTypeJobSettings(t *testing.T) { func TestTypeRoot(t *testing.T) { testStruct(t, reflect.TypeOf(config.Root{}), - 3600, 4000, // 3980 at the time of the update + 4000, 4300, // 4003 at the time of the update map[string]any{ ".bundle.target": "", `.variables[*].lookup.dashboard`: "", From db9bafa1cd9916901b4895a367ae0d9a6ef02b7f Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 2 Sep 2025 17:01:17 +0200 Subject: [PATCH 08/19] fixed schema --- bundle/internal/schema/annotations.yml | 56 ------------------- .../schema/annotations_openapi_overrides.yml | 42 ++++++++++++++ bundle/schema/jsonschema.json | 31 +--------- 3 files changed, 44 insertions(+), 85 deletions(-) diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 3d698b0a35..3fbf95e8ba 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -439,10 +439,6 @@ github.com/databricks/cli/bundle/config.Workspace: "state_path": "description": |- The workspace state path -github.com/databricks/cli/bundle/config/resources.App: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.AppPermission: "group_name": "description": |- @@ -456,10 +452,6 @@ github.com/databricks/cli/bundle/config/resources.AppPermission: "user_name": "description": |- PLACEHOLDER -github.com/databricks/cli/bundle/config/resources.Cluster: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.ClusterPermission: "group_name": "description": |- @@ -473,10 +465,6 @@ github.com/databricks/cli/bundle/config/resources.ClusterPermission: "user_name": "description": |- PLACEHOLDER -github.com/databricks/cli/bundle/config/resources.Dashboard: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.DashboardPermission: "group_name": "description": |- @@ -490,10 +478,6 @@ github.com/databricks/cli/bundle/config/resources.DashboardPermission: "user_name": "description": |- PLACEHOLDER -github.com/databricks/cli/bundle/config/resources.DatabaseInstance: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermission: "group_name": "description": |- @@ -514,10 +498,6 @@ github.com/databricks/cli/bundle/config/resources.Grant: "privileges": "description": |- The privileges to grant to the specified entity -github.com/databricks/cli/bundle/config/resources.Job: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.JobPermission: "group_name": "description": |- @@ -535,10 +515,6 @@ github.com/databricks/cli/bundle/config/resources.Lifecycle: "prevent_destroy": "description": |- PLACEHOLDER -github.com/databricks/cli/bundle/config/resources.MlflowExperiment: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission: "group_name": "description": |- @@ -552,10 +528,6 @@ 
github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission: "user_name": "description": |- PLACEHOLDER -github.com/databricks/cli/bundle/config/resources.MlflowModel: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.MlflowModelPermission: "group_name": "description": |- @@ -569,10 +541,6 @@ github.com/databricks/cli/bundle/config/resources.MlflowModelPermission: "user_name": "description": |- PLACEHOLDER -github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermission: "group_name": "description": |- @@ -604,10 +572,6 @@ github.com/databricks/cli/bundle/config/resources.Permission: "user_name": "description": |- The name of the user that has the permission set in level. -github.com/databricks/cli/bundle/config/resources.Pipeline: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.PipelinePermission: "group_name": "description": |- @@ -621,18 +585,6 @@ github.com/databricks/cli/bundle/config/resources.PipelinePermission: "user_name": "description": |- PLACEHOLDER -github.com/databricks/cli/bundle/config/resources.QualityMonitor: - "lifecycle": - "description": |- - PLACEHOLDER -github.com/databricks/cli/bundle/config/resources.RegisteredModel: - "lifecycle": - "description": |- - PLACEHOLDER -github.com/databricks/cli/bundle/config/resources.Schema: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.SchemaGrant: "principal": "description": |- @@ -669,10 +621,6 @@ github.com/databricks/cli/bundle/config/resources.SecretScopePermission: "user_name": "description": |- The name of the user that has the permission set in level. This field translates to a `principal` field in secret scope ACL. -github.com/databricks/cli/bundle/config/resources.SqlWarehouse: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.SqlWarehousePermission: "group_name": "description": |- @@ -711,10 +659,6 @@ github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable: "unity_catalog_provisioning_state": "description": |- PLACEHOLDER -github.com/databricks/cli/bundle/config/resources.Volume: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.VolumeGrant: "principal": "description": |- diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml index 2e49c2d516..680b15b660 100644 --- a/bundle/internal/schema/annotations_openapi_overrides.yml +++ b/bundle/internal/schema/annotations_openapi_overrides.yml @@ -14,6 +14,9 @@ github.com/databricks/cli/bundle/config/resources.App: "effective_budget_policy_id": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER "oauth2_app_client_id": "description": |- PLACEHOLDER @@ -84,6 +87,9 @@ github.com/databricks/cli/bundle/config/resources.Cluster: "kind": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER "permissions": "description": |- PLACEHOLDER @@ -140,6 +146,9 @@ github.com/databricks/cli/bundle/config/resources.Dashboard: "file_path": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER "lifecycle_state": "description": |- The state of the dashboard resource. Used for tracking trashed status. 
@@ -190,6 +199,9 @@ github.com/databricks/cli/bundle/config/resources.DatabaseCatalog: "description": |- PLACEHOLDER github.com/databricks/cli/bundle/config/resources.DatabaseInstance: + "lifecycle": + "description": |- + PLACEHOLDER "permissions": "description": |- PLACEHOLDER @@ -224,6 +236,9 @@ github.com/databricks/cli/bundle/config/resources.Job: "health": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER "permissions": "description": |- PLACEHOLDER @@ -258,6 +273,9 @@ github.com/databricks/cli/bundle/config/resources.MlflowExperiment: group_name: users description: MLflow experiment used to track runs ``` + "lifecycle": + "description": |- + PLACEHOLDER "permissions": "description": |- PLACEHOLDER @@ -274,6 +292,9 @@ github.com/databricks/cli/bundle/config/resources.MlflowModel: "_": "markdown_description": |- The model resource allows you to define [legacy models](/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use Unity Catalog [registered models](#registered-model) instead. + "lifecycle": + "description": |- + PLACEHOLDER "permissions": "description": |- PLACEHOLDER @@ -319,6 +340,9 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint: "description": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER "permissions": "description": |- PLACEHOLDER @@ -358,6 +382,9 @@ github.com/databricks/cli/bundle/config/resources.Pipeline: "dry_run": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER "permissions": "description": |- PLACEHOLDER @@ -406,6 +433,9 @@ github.com/databricks/cli/bundle/config/resources.QualityMonitor: "inference_log": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER "table_name": "description": |- PLACEHOLDER @@ -432,6 +462,9 @@ github.com/databricks/cli/bundle/config/resources.RegisteredModel: "grants": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.Schema: "_": "markdown_description": |- @@ -482,6 +515,9 @@ github.com/databricks/cli/bundle/config/resources.Schema: "grants": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER "properties": "description": |- PLACEHOLDER @@ -529,6 +565,9 @@ github.com/databricks/cli/bundle/config/resources.SqlWarehouse: Configures whether the warehouse should use Photon optimized clusters. Defaults to true. + "lifecycle": + "description": |- + PLACEHOLDER "permissions": "description": |- PLACEHOLDER @@ -573,6 +612,9 @@ github.com/databricks/cli/bundle/config/resources.Volume: "grants": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER "volume_type": "description": |- PLACEHOLDER diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 8711266cb2..fdfb93c185 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -162,7 +162,6 @@ "additionalProperties": false, "required": [ "source_code_path", - "lifecycle", "name" ] }, @@ -353,9 +352,6 @@ } }, "additionalProperties": false, - "required": [ - "lifecycle" - ], "markdownDescription": "The cluster resource defines an [all-purpose cluster](https://docs.databricks.com/api/workspace/clusters/create)." 
}, { @@ -468,9 +464,6 @@ } }, "additionalProperties": false, - "required": [ - "lifecycle" - ], "markdownDescription": "The dashboard resource allows you to manage [AI/BI dashboards](https://docs.databricks.com/api/workspace/lakeview/create) in a bundle. For information about AI/BI dashboards, see [link](https://docs.databricks.com/dashboards/index.html)." }, { @@ -661,7 +654,6 @@ }, "additionalProperties": false, "required": [ - "lifecycle", "name" ] }, @@ -843,9 +835,6 @@ } }, "additionalProperties": false, - "required": [ - "lifecycle" - ], "markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a Databricks Asset Bundles template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)." }, { @@ -959,9 +948,6 @@ } }, "additionalProperties": false, - "required": [ - "lifecycle" - ], "markdownDescription": "The experiment resource allows you to define [MLflow experiments](https://docs.databricks.com/api/workspace/experiments/createexperiment) in a bundle. For information about MLflow experiments, see [link](https://docs.databricks.com/mlflow/experiments.html)." }, { @@ -1041,7 +1027,6 @@ }, "additionalProperties": false, "required": [ - "lifecycle", "name" ], "markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use Unity Catalog [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead." @@ -1150,7 +1135,6 @@ }, "additionalProperties": false, "required": [ - "lifecycle", "name" ], "markdownDescription": "The model_serving_endpoint resource allows you to define [model serving endpoints](https://docs.databricks.com/api/workspace/servingendpoints/create). See [link](https://docs.databricks.com/machine-learning/model-serving/manage-serving-endpoints.html)." @@ -1375,9 +1359,6 @@ } }, "additionalProperties": false, - "required": [ - "lifecycle" - ], "markdownDescription": "The pipeline resource allows you to create Delta Live Tables [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/dlt/index.html). For a tutorial that uses the Databricks Asset Bundles template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)." 
}, { @@ -1504,7 +1485,6 @@ "additionalProperties": false, "required": [ "table_name", - "lifecycle", "assets_dir", "output_schema_name" ], @@ -1550,7 +1530,6 @@ }, "additionalProperties": false, "required": [ - "lifecycle", "catalog_name", "name", "schema_name" @@ -1596,7 +1575,6 @@ }, "additionalProperties": false, "required": [ - "lifecycle", "catalog_name", "name" ], @@ -1685,8 +1663,7 @@ }, "additionalProperties": false, "required": [ - "name", - "lifecycle" + "name" ] }, { @@ -1808,10 +1785,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.CreateWarehouseRequestWarehouseType" } }, - "additionalProperties": false, - "required": [ - "lifecycle" - ] + "additionalProperties": false }, { "type": "string", @@ -1944,7 +1918,6 @@ }, "additionalProperties": false, "required": [ - "lifecycle", "catalog_name", "name", "schema_name" From 71d8981d936d4009fbabf7eb9a3c22591a1d9f11 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 2 Sep 2025 17:06:30 +0200 Subject: [PATCH 09/19] make python --- .../databricks/bundles/jobs/__init__.py | 8 +++++ .../databricks/bundles/jobs/_models/job.py | 5 +++ .../bundles/jobs/_models/lifecycle.py | 32 +++++++++++++++++++ .../databricks/bundles/pipelines/__init__.py | 8 +++++ .../bundles/pipelines/_models/lifecycle.py | 32 +++++++++++++++++++ .../bundles/pipelines/_models/pipeline.py | 8 +++++ .../databricks/bundles/volumes/__init__.py | 8 +++++ .../bundles/volumes/_models/lifecycle.py | 32 +++++++++++++++++++ .../bundles/volumes/_models/volume.py | 5 +++ 9 files changed, 138 insertions(+) create mode 100644 experimental/python/databricks/bundles/jobs/_models/lifecycle.py create mode 100644 experimental/python/databricks/bundles/pipelines/_models/lifecycle.py create mode 100644 experimental/python/databricks/bundles/volumes/_models/lifecycle.py diff --git a/experimental/python/databricks/bundles/jobs/__init__.py b/experimental/python/databricks/bundles/jobs/__init__.py index dec48ac51c..3c3df3a693 100644 --- a/experimental/python/databricks/bundles/jobs/__init__.py +++ b/experimental/python/databricks/bundles/jobs/__init__.py @@ -134,6 +134,9 @@ "Library", "LibraryDict", "LibraryParam", + "Lifecycle", + "LifecycleDict", + "LifecycleParam", "LocalFileInfo", "LocalFileInfoDict", "LocalFileInfoParam", @@ -482,6 +485,11 @@ JobsHealthRulesParam, ) from databricks.bundles.jobs._models.library import Library, LibraryDict, LibraryParam +from databricks.bundles.jobs._models.lifecycle import ( + Lifecycle, + LifecycleDict, + LifecycleParam, +) from databricks.bundles.jobs._models.local_file_info import ( LocalFileInfo, LocalFileInfoDict, diff --git a/experimental/python/databricks/bundles/jobs/_models/job.py b/experimental/python/databricks/bundles/jobs/_models/job.py index e66e18f40b..8ba6d20d92 100644 --- a/experimental/python/databricks/bundles/jobs/_models/job.py +++ b/experimental/python/databricks/bundles/jobs/_models/job.py @@ -44,6 +44,7 @@ JobsHealthRules, JobsHealthRulesParam, ) +from databricks.bundles.jobs._models.lifecycle import Lifecycle, LifecycleParam from databricks.bundles.jobs._models.performance_target import ( PerformanceTarget, PerformanceTargetParam, @@ -116,6 +117,8 @@ class Job(Resource): A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. 
""" + lifecycle: VariableOrOptional[Lifecycle] = None + max_concurrent_runs: VariableOrOptional[int] = None """ An optional maximum allowed number of concurrent runs of the job. @@ -256,6 +259,8 @@ class JobDict(TypedDict, total=False): A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. """ + lifecycle: VariableOrOptional[LifecycleParam] + max_concurrent_runs: VariableOrOptional[int] """ An optional maximum allowed number of concurrent runs of the job. diff --git a/experimental/python/databricks/bundles/jobs/_models/lifecycle.py b/experimental/python/databricks/bundles/jobs/_models/lifecycle.py new file mode 100644 index 0000000000..a71b4e0be7 --- /dev/null +++ b/experimental/python/databricks/bundles/jobs/_models/lifecycle.py @@ -0,0 +1,32 @@ +from dataclasses import dataclass +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import VariableOrOptional + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class Lifecycle: + """""" + + prevent_destroy: VariableOrOptional[bool] = None + + @classmethod + def from_dict(cls, value: "LifecycleDict") -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "LifecycleDict": + return _transform_to_json_value(self) # type:ignore + + +class LifecycleDict(TypedDict, total=False): + """""" + + prevent_destroy: VariableOrOptional[bool] + + +LifecycleParam = LifecycleDict | Lifecycle diff --git a/experimental/python/databricks/bundles/pipelines/__init__.py b/experimental/python/databricks/bundles/pipelines/__init__.py index 06dfc4390e..8801727328 100644 --- a/experimental/python/databricks/bundles/pipelines/__init__.py +++ b/experimental/python/databricks/bundles/pipelines/__init__.py @@ -56,6 +56,9 @@ "InitScriptInfo", "InitScriptInfoDict", "InitScriptInfoParam", + "Lifecycle", + "LifecycleDict", + "LifecycleParam", "LocalFileInfo", "LocalFileInfoDict", "LocalFileInfoParam", @@ -236,6 +239,11 @@ InitScriptInfoDict, InitScriptInfoParam, ) +from databricks.bundles.pipelines._models.lifecycle import ( + Lifecycle, + LifecycleDict, + LifecycleParam, +) from databricks.bundles.pipelines._models.local_file_info import ( LocalFileInfo, LocalFileInfoDict, diff --git a/experimental/python/databricks/bundles/pipelines/_models/lifecycle.py b/experimental/python/databricks/bundles/pipelines/_models/lifecycle.py new file mode 100644 index 0000000000..a71b4e0be7 --- /dev/null +++ b/experimental/python/databricks/bundles/pipelines/_models/lifecycle.py @@ -0,0 +1,32 @@ +from dataclasses import dataclass +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import VariableOrOptional + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class Lifecycle: + """""" + + prevent_destroy: VariableOrOptional[bool] = None + + @classmethod + def from_dict(cls, value: "LifecycleDict") -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "LifecycleDict": + return _transform_to_json_value(self) # type:ignore + + +class LifecycleDict(TypedDict, total=False): + """""" + + prevent_destroy: 
VariableOrOptional[bool] + + +LifecycleParam = LifecycleDict | Lifecycle diff --git a/experimental/python/databricks/bundles/pipelines/_models/pipeline.py b/experimental/python/databricks/bundles/pipelines/_models/pipeline.py index 817689f338..90ae8356f1 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/pipeline.py +++ b/experimental/python/databricks/bundles/pipelines/_models/pipeline.py @@ -25,6 +25,10 @@ IngestionPipelineDefinition, IngestionPipelineDefinitionParam, ) +from databricks.bundles.pipelines._models.lifecycle import ( + Lifecycle, + LifecycleParam, +) from databricks.bundles.pipelines._models.notifications import ( Notifications, NotificationsParam, @@ -143,6 +147,8 @@ class Pipeline(Resource): Libraries or code needed by this deployment. """ + lifecycle: VariableOrOptional[Lifecycle] = None + name: VariableOrOptional[str] = None """ Friendly identifier for this pipeline. @@ -301,6 +307,8 @@ class PipelineDict(TypedDict, total=False): Libraries or code needed by this deployment. """ + lifecycle: VariableOrOptional[LifecycleParam] + name: VariableOrOptional[str] """ Friendly identifier for this pipeline. diff --git a/experimental/python/databricks/bundles/volumes/__init__.py b/experimental/python/databricks/bundles/volumes/__init__.py index 177e6480b4..065713bf6c 100644 --- a/experimental/python/databricks/bundles/volumes/__init__.py +++ b/experimental/python/databricks/bundles/volumes/__init__.py @@ -1,4 +1,7 @@ __all__ = [ + "Lifecycle", + "LifecycleDict", + "LifecycleParam", "Volume", "VolumeDict", "VolumeGrant", @@ -12,6 +15,11 @@ ] +from databricks.bundles.volumes._models.lifecycle import ( + Lifecycle, + LifecycleDict, + LifecycleParam, +) from databricks.bundles.volumes._models.volume import Volume, VolumeDict, VolumeParam from databricks.bundles.volumes._models.volume_grant import ( VolumeGrant, diff --git a/experimental/python/databricks/bundles/volumes/_models/lifecycle.py b/experimental/python/databricks/bundles/volumes/_models/lifecycle.py new file mode 100644 index 0000000000..a71b4e0be7 --- /dev/null +++ b/experimental/python/databricks/bundles/volumes/_models/lifecycle.py @@ -0,0 +1,32 @@ +from dataclasses import dataclass +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import VariableOrOptional + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class Lifecycle: + """""" + + prevent_destroy: VariableOrOptional[bool] = None + + @classmethod + def from_dict(cls, value: "LifecycleDict") -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "LifecycleDict": + return _transform_to_json_value(self) # type:ignore + + +class LifecycleDict(TypedDict, total=False): + """""" + + prevent_destroy: VariableOrOptional[bool] + + +LifecycleParam = LifecycleDict | Lifecycle diff --git a/experimental/python/databricks/bundles/volumes/_models/volume.py b/experimental/python/databricks/bundles/volumes/_models/volume.py index 65c5c7ab2f..20615ec27e 100644 --- a/experimental/python/databricks/bundles/volumes/_models/volume.py +++ b/experimental/python/databricks/bundles/volumes/_models/volume.py @@ -9,6 +9,7 @@ VariableOrList, VariableOrOptional, ) +from databricks.bundles.volumes._models.lifecycle import Lifecycle, LifecycleParam from databricks.bundles.volumes._models.volume_grant import ( VolumeGrant, VolumeGrantParam, @@ -45,6 +46,8 
@@ class Volume(Resource): grants: VariableOrList[VolumeGrant] = field(default_factory=list) + lifecycle: VariableOrOptional[Lifecycle] = None + storage_location: VariableOrOptional[str] = None """ The storage location on the cloud @@ -85,6 +88,8 @@ class VolumeDict(TypedDict, total=False): grants: VariableOrList[VolumeGrantParam] + lifecycle: VariableOrOptional[LifecycleParam] + storage_location: VariableOrOptional[str] """ The storage location on the cloud From 404192e0e11f2b680b4c8d01bb5dd9266e07076c Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 8 Sep 2025 13:05:08 +0200 Subject: [PATCH 10/19] fixes --- .../prevent-destroy/out.direct-exp.txt | 15 ++++- .../prevent-destroy/out.terraform.txt | 25 ++++++++ .../bundle/lifecycle/prevent-destroy/script | 6 +- bundle/config/resources/database_catalog.go | 1 + .../config/resources/synced_database_table.go | 1 + bundle/deploy/terraform/lifecycle_test.go | 39 +++++++++++ .../terraform/tfdyn/convert_app_test.go | 33 ---------- .../terraform/tfdyn/convert_cluster_test.go | 32 ---------- .../terraform/tfdyn/convert_dashboard_test.go | 31 --------- .../tfdyn/convert_database_catalog.go | 7 ++ .../tfdyn/convert_database_instance_test.go | 30 --------- .../tfdyn/convert_experiment_test.go | 27 -------- .../terraform/tfdyn/convert_job_test.go | 27 -------- .../convert_model_serving_endpoint_test.go | 27 -------- .../terraform/tfdyn/convert_model_test.go | 27 -------- .../terraform/tfdyn/convert_pipeline_test.go | 27 -------- .../tfdyn/convert_quality_monitor_test.go | 29 --------- .../tfdyn/convert_registered_model_test.go | 27 -------- .../terraform/tfdyn/convert_schema_test.go | 28 -------- .../tfdyn/convert_sql_warehouse_test.go | 27 -------- .../tfdyn/convert_synced_database_table.go | 6 ++ .../terraform/tfdyn/convert_volume_test.go | 27 -------- bundle/phases/plan.go | 2 +- bundle/phases/plan_test.go | 64 +++++++++++++++++++ 24 files changed, 162 insertions(+), 403 deletions(-) create mode 100644 bundle/deploy/terraform/lifecycle_test.go create mode 100644 bundle/phases/plan_test.go diff --git a/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt b/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt index 47b9844786..cdde72912c 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt +++ b/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt @@ -1,17 +1,25 @@ +>>> errcode [CLI] bundle plan + >>> musterr [CLI] bundle destroy --auto-approve -Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy +Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for pipelines.my_pipelines Exit code (musterr): 1 +>>> errcode [CLI] bundle plan +recreate pipelines.my_pipelines + >>> musterr [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... -Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy +Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. 
To avoid this error, disable lifecycle.prevent_destroy for pipelines.my_pipelines Exit code (musterr): 1 +>>> errcode [CLI] bundle plan +recreate pipelines.my_pipelines + >>> [CLI] bundle deploy --auto-approve Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... @@ -24,6 +32,9 @@ Deploying resources... Updating deployment state... Deployment complete! +>>> errcode [CLI] bundle plan +delete pipelines.my_pipelines + >>> [CLI] bundle deploy --auto-approve Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... diff --git a/acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt b/acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt index d67c3f84e7..01353aa146 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt +++ b/acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt @@ -1,4 +1,6 @@ +>>> errcode [CLI] bundle plan + >>> musterr [CLI] bundle destroy --auto-approve Error: exit status 1 @@ -16,6 +18,23 @@ the scope of the plan using the -target flag. Exit code (musterr): 1 +>>> errcode [CLI] bundle plan +Error: exit status 1 + +Error: Instance cannot be destroyed + + on bundle.tf.json line 15, in resource.databricks_pipeline: + 15: "my_pipelines": { + +Resource databricks_pipeline.my_pipelines has lifecycle.prevent_destroy set, +but the plan calls for this resource to be destroyed. To avoid this error and +continue with the plan, either disable lifecycle.prevent_destroy or reduce +the scope of the plan using the -target flag. + + + +Exit code: 1 + >>> musterr [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... Error: exit status 1 @@ -34,6 +53,9 @@ the scope of the plan using the -target flag. Exit code (musterr): 1 +>>> errcode [CLI] bundle plan +recreate pipelines.my_pipelines + >>> [CLI] bundle deploy --auto-approve Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... @@ -46,6 +68,9 @@ Deploying resources... Updating deployment state... Deployment complete! +>>> errcode [CLI] bundle plan +delete pipelines.my_pipelines + >>> [CLI] bundle deploy --auto-approve Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... 
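For context, the expected outputs above are produced against a databricks.yml of roughly the following shape. The file itself is not part of these hunks, so this is a sketch inferred from the update_file.py substitutions in the script below; only the bundle name, the my_pipelines key, catalog: main, prevent_destroy: true, and the *pipeline_base anchor are taken from this patch, and the remaining fields are illustrative:

bundle:
  name: prevent-destroy

pipeline: &pipeline_base
  resources:
    pipelines:
      my_pipelines:
        name: "test-pipeline"   # illustrative
        catalog: main
        lifecycle:
          prevent_destroy: true

<<: *pipeline_base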
diff --git a/acceptance/bundle/lifecycle/prevent-destroy/script b/acceptance/bundle/lifecycle/prevent-destroy/script index 6e02c58b6f..3f8e8f440f 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/script +++ b/acceptance/bundle/lifecycle/prevent-destroy/script @@ -2,17 +2,21 @@ trace $CLI bundle validate trace $CLI bundle deploy -trace musterr $CLI bundle destroy --auto-approve >out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 +trace errcode $CLI bundle plan >out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 +trace musterr $CLI bundle destroy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 # Changing the catalog name, deploy must fail because pipeline will be recreated update_file.py databricks.yml 'catalog: main' 'catalog: mainnew' +trace errcode $CLI bundle plan >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 trace musterr $CLI bundle deploy >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 # Removing the prevent_destroy, deploy must succeed update_file.py databricks.yml 'prevent_destroy: true' 'prevent_destroy: false' +trace errcode $CLI bundle plan >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 trace $CLI bundle deploy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 update_file.py databricks.yml 'prevent_destroy: false' 'prevent_destroy: true' # Removing the job, deploy must succeed update_file.py databricks.yml '<<: *pipeline_base' '' +trace errcode $CLI bundle plan >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 trace $CLI bundle deploy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 diff --git a/bundle/config/resources/database_catalog.go b/bundle/config/resources/database_catalog.go index 159759b430..cb0c82b847 100644 --- a/bundle/config/resources/database_catalog.go +++ b/bundle/config/resources/database_catalog.go @@ -16,6 +16,7 @@ type DatabaseCatalog struct { ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` database.DatabaseCatalog + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (d *DatabaseCatalog) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) { diff --git a/bundle/config/resources/synced_database_table.go b/bundle/config/resources/synced_database_table.go index 0c433daf51..f863bf350b 100644 --- a/bundle/config/resources/synced_database_table.go +++ b/bundle/config/resources/synced_database_table.go @@ -16,6 +16,7 @@ type SyncedDatabaseTable struct { ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` database.SyncedDatabaseTable + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *SyncedDatabaseTable) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) { diff --git a/bundle/deploy/terraform/lifecycle_test.go b/bundle/deploy/terraform/lifecycle_test.go new file mode 100644 index 0000000000..697a2270cd --- /dev/null +++ b/bundle/deploy/terraform/lifecycle_test.go @@ -0,0 +1,39 @@ +package terraform + +import ( + "context" + "encoding/json" + "testing" + + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/require" +) + +func TestConvertLifecycleForAllResources(t *testing.T) { + supportedResources := config.SupportedResources() + ctx := context.Background() + + for resourceType := range supportedResources { + t.Run(resourceType, func(t *testing.T) { + vin := dyn.NewValue(map[string]dyn.Value{ + "resources": dyn.NewValue(map[string]dyn.Value{ + resourceType: dyn.NewValue(map[string]dyn.Value{ + "test_resource": dyn.NewValue(map[string]dyn.Value{ + "lifecycle": 
dyn.NewValue(map[string]dyn.Value{ + "prevent_destroy": dyn.NewValue(true, nil), + }, nil), + }, nil), + }, nil), + }, nil), + }, nil) + + tfroot, err := BundleToTerraformWithDynValue(ctx, vin) + require.NoError(t, err) + + bytes, err := json.Marshal(tfroot.Resource) + require.NoError(t, err) + require.Contains(t, string(bytes), `"lifecycle":{"prevent_destroy":true}`) + }) + } +} diff --git a/bundle/deploy/terraform/tfdyn/convert_app_test.go b/bundle/deploy/terraform/tfdyn/convert_app_test.go index d58e9ff214..f95a6419a5 100644 --- a/bundle/deploy/terraform/tfdyn/convert_app_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_app_test.go @@ -156,36 +156,3 @@ func TestConvertAppWithNoDescription(t *testing.T) { }, }, app) } - -func TestConvertAppWithLifecycle(t *testing.T) { - src := resources.App{ - SourceCodePath: "./app", - Config: map[string]any{ - "command": []string{"python", "app.py"}, - }, - App: apps.App{ - Name: "app_id", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = appConverter{}.Convert(ctx, "my_app", vin, out) - require.NoError(t, err) - - // Assert equality on the app - assert.Equal(t, map[string]any{ - "name": "app_id", - "description": "", - "no_compute": true, - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.App["my_app"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_cluster_test.go b/bundle/deploy/terraform/tfdyn/convert_cluster_test.go index df0d8b3dda..2e1f71f30a 100644 --- a/bundle/deploy/terraform/tfdyn/convert_cluster_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_cluster_test.go @@ -94,35 +94,3 @@ func TestConvertCluster(t *testing.T) { }, }, out.Permissions["cluster_my_cluster"]) } - -func TestConvertClusterWithLifecycle(t *testing.T) { - src := resources.Cluster{ - ClusterSpec: compute.ClusterSpec{ - NumWorkers: 3, - SparkVersion: "13.3.x-scala2.12", - ClusterName: "cluster", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = clusterConverter{}.Convert(ctx, "my_cluster", vin, out) - require.NoError(t, err) - - // Assert equality on the cluster - assert.Equal(t, map[string]any{ - "num_workers": int64(3), - "spark_version": "13.3.x-scala2.12", - "cluster_name": "cluster", - "no_wait": true, - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.Cluster["my_cluster"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go b/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go index e4e1d4cf68..18a61e5c77 100644 --- a/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go @@ -112,34 +112,3 @@ func TestConvertDashboardSerializedDashboardAny(t *testing.T) { // Assert that the "file_path" is dropped. 
assert.NotContains(t, out.Dashboard["my_dashboard"], "file_path") } - -func TestConvertDashboardWithLifecycle(t *testing.T) { - src := resources.Dashboard{ - DashboardConfig: resources.DashboardConfig{ - Dashboard: dashboards.Dashboard{ - DisplayName: "my dashboard", - WarehouseId: "f00dcafe", - }, - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = dashboardConverter{}.Convert(ctx, "my_dashboard", vin, out) - require.NoError(t, err) - - // Assert equality on the dashboard - assert.Equal(t, map[string]any{ - "display_name": "my dashboard", - "warehouse_id": "f00dcafe", - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.Dashboard["my_dashboard"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_database_catalog.go b/bundle/deploy/terraform/tfdyn/convert_database_catalog.go index 3aceff5e88..716c3baf81 100644 --- a/bundle/deploy/terraform/tfdyn/convert_database_catalog.go +++ b/bundle/deploy/terraform/tfdyn/convert_database_catalog.go @@ -18,6 +18,13 @@ func (d databaseCatalogConverter) Convert(ctx context.Context, key string, vin d for _, diag := range diags { log.Debugf(ctx, "database Catalog normalization diagnostic: %s", diag.Summary) } + + var err error + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } + out.DatabaseDatabaseCatalog[key] = vout.AsAny() return nil diff --git a/bundle/deploy/terraform/tfdyn/convert_database_instance_test.go b/bundle/deploy/terraform/tfdyn/convert_database_instance_test.go index 50d1c5ae86..6671282cbf 100644 --- a/bundle/deploy/terraform/tfdyn/convert_database_instance_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_database_instance_test.go @@ -117,33 +117,3 @@ func TestConvertDatabaseInstanceWithPermissions(t *testing.T) { }, }, out.Permissions["database_instance_db_with_permissions"]) } - -func TestConvertDatabaseInstanceWithLifecycle(t *testing.T) { - src := resources.DatabaseInstance{ - DatabaseInstance: database.DatabaseInstance{ - Name: "test-db-instance", - Capacity: "CU_4", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = databaseInstanceConverter{}.Convert(ctx, "my_database_instance", vin, out) - require.NoError(t, err) - - // Assert equality on the database instance - assert.Equal(t, map[string]any{ - "name": "test-db-instance", - "capacity": "CU_4", - "purge_on_delete": true, - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.DatabaseInstance["my_database_instance"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_experiment_test.go b/bundle/deploy/terraform/tfdyn/convert_experiment_test.go index 8006cf94e1..44c2cd820a 100644 --- a/bundle/deploy/terraform/tfdyn/convert_experiment_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_experiment_test.go @@ -50,30 +50,3 @@ func TestConvertExperiment(t *testing.T) { }, }, out.Permissions["mlflow_experiment_my_experiment"]) } - -func TestConvertExperimentWithLifecycle(t *testing.T) { - src := resources.MlflowExperiment{ - Experiment: ml.Experiment{ - Name: "name", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := 
schema.NewResources() - err = experimentConverter{}.Convert(ctx, "my_experiment", vin, out) - require.NoError(t, err) - - // Assert equality on the experiment - assert.Equal(t, map[string]any{ - "name": "name", - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.MlflowExperiment["my_experiment"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_job_test.go b/bundle/deploy/terraform/tfdyn/convert_job_test.go index 5b812a127c..a7c506d592 100644 --- a/bundle/deploy/terraform/tfdyn/convert_job_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_job_test.go @@ -286,30 +286,3 @@ func TestConvertJobApplyPolicyDefaultValues(t *testing.T) { }, }, out.Job["my_job"]) } - -func TestConvertJobWithLifecycle(t *testing.T) { - src := resources.Job{ - JobSettings: jobs.JobSettings{ - Name: "my job", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = jobConverter{}.Convert(ctx, "my_job", vin, out) - require.NoError(t, err) - - // Assert equality on the job - assert.Equal(t, map[string]any{ - "name": "my job", - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.Job["my_job"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go index f408d6305b..029478a10a 100644 --- a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go @@ -86,30 +86,3 @@ func TestConvertModelServingEndpoint(t *testing.T) { }, }, out.Permissions["model_serving_my_model_serving_endpoint"]) } - -func TestConvertModelServingEndpointWithLifecycle(t *testing.T) { - src := resources.ModelServingEndpoint{ - CreateServingEndpoint: serving.CreateServingEndpoint{ - Name: "name", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = modelServingEndpointConverter{}.Convert(ctx, "my_model_serving_endpoint", vin, out) - require.NoError(t, err) - - // Assert equality on the model serving endpoint - assert.Equal(t, map[string]any{ - "name": "name", - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.ModelServing["my_model_serving_endpoint"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_model_test.go b/bundle/deploy/terraform/tfdyn/convert_model_test.go index a395c1cb29..0b36034514 100644 --- a/bundle/deploy/terraform/tfdyn/convert_model_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_model_test.go @@ -72,30 +72,3 @@ func TestConvertModel(t *testing.T) { }, }, out.Permissions["mlflow_model_my_model"]) } - -func TestConvertModelWithLifecycle(t *testing.T) { - src := resources.MlflowModel{ - CreateModelRequest: ml.CreateModelRequest{ - Name: "name", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = modelConverter{}.Convert(ctx, "my_model", vin, out) - require.NoError(t, err) - - // Assert equality on the model - assert.Equal(t, map[string]any{ - "name": "name", - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.MlflowModel["my_model"]) -} diff --git 
a/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go b/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go index 2fe5b0d4c4..ed6bd70a08 100644 --- a/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go @@ -139,30 +139,3 @@ func TestConvertPipeline(t *testing.T) { }, }, out.Permissions["pipeline_my_pipeline"]) } - -func TestConvertPipelineWithLifecycle(t *testing.T) { - src := resources.Pipeline{ - CreatePipeline: pipelines.CreatePipeline{ - Name: "my pipeline", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = pipelineConverter{}.Convert(ctx, "my_pipeline", vin, out) - require.NoError(t, err) - - // Assert equality on the pipeline - assert.Equal(t, map[string]any{ - "name": "my pipeline", - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.Pipeline["my_pipeline"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go b/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go index 8381d15ffd..4e457ca3b3 100644 --- a/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go @@ -44,32 +44,3 @@ func TestConvertQualityMonitor(t *testing.T) { }, }, out.QualityMonitor["my_monitor"]) } - -func TestConvertQualityMonitorWithLifecycle(t *testing.T) { - src := resources.QualityMonitor{ - TableName: "test_table_name", - CreateMonitor: catalog.CreateMonitor{ - AssetsDir: "assets_dir", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = qualityMonitorConverter{}.Convert(ctx, "my_monitor", vin, out) - require.NoError(t, err) - - // Assert equality on the quality monitor - assert.Equal(t, map[string]any{ - "assets_dir": "assets_dir", - "table_name": "test_table_name", - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.QualityMonitor["my_monitor"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go b/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go index dba297b165..633ec3eee4 100644 --- a/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go @@ -56,30 +56,3 @@ func TestConvertRegisteredModel(t *testing.T) { }, }, out.Grants["registered_model_my_registered_model"]) } - -func TestConvertRegisteredModelWithLifecycle(t *testing.T) { - src := resources.RegisteredModel{ - CreateRegisteredModelRequest: catalog.CreateRegisteredModelRequest{ - Name: "name", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = registeredModelConverter{}.Convert(ctx, "my_registered_model", vin, out) - require.NoError(t, err) - - // Assert equality on the registered model - assert.Equal(t, map[string]any{ - "name": "name", - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.RegisteredModel["my_registered_model"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_schema_test.go b/bundle/deploy/terraform/tfdyn/convert_schema_test.go index d18fd197db..cb5ea0d1ca 100644 --- 
a/bundle/deploy/terraform/tfdyn/convert_schema_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_schema_test.go @@ -77,31 +77,3 @@ func TestConvertSchema(t *testing.T) { }, }, out.Grants["schema_my_schema"]) } - -func TestConvertSchemaWithLifecycle(t *testing.T) { - src := resources.Schema{ - CreateSchema: catalog.CreateSchema{ - Name: "name", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = schemaConverter{}.Convert(ctx, "my_schema", vin, out) - require.NoError(t, err) - - // Assert equality on the schema - assert.Equal(t, map[string]any{ - "name": "name", - "force_destroy": true, - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.Schema["my_schema"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_sql_warehouse_test.go b/bundle/deploy/terraform/tfdyn/convert_sql_warehouse_test.go index a5ee21e2cd..05e58121bf 100644 --- a/bundle/deploy/terraform/tfdyn/convert_sql_warehouse_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_sql_warehouse_test.go @@ -65,30 +65,3 @@ func TestConvertSqlWarehouse(t *testing.T) { "min_num_clusters": int64(1), }, sqlWarehouse) } - -func TestConvertSqlWarehouseWithLifecycle(t *testing.T) { - src := resources.SqlWarehouse{ - CreateWarehouseRequest: sql.CreateWarehouseRequest{ - Name: "test_sql_warehouse", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = sqlWarehouseConverter{}.Convert(ctx, "test_sql_warehouse", vin, out) - require.NoError(t, err) - - // Assert equality on the SQL warehouse - assert.Equal(t, map[string]any{ - "name": "test_sql_warehouse", - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.SqlEndpoint["test_sql_warehouse"]) -} diff --git a/bundle/deploy/terraform/tfdyn/convert_synced_database_table.go b/bundle/deploy/terraform/tfdyn/convert_synced_database_table.go index 8d485e7fdf..5e64711629 100644 --- a/bundle/deploy/terraform/tfdyn/convert_synced_database_table.go +++ b/bundle/deploy/terraform/tfdyn/convert_synced_database_table.go @@ -18,6 +18,12 @@ func (s syncedDatabaseTableConverter) Convert(ctx context.Context, key string, v for _, diag := range diags { log.Debugf(ctx, "synced database table normalization diagnostic: %s", diag.Summary) } + + var err error + vout, err = convertLifecycle(ctx, vout, vin.Get("lifecycle")) + if err != nil { + return err + } out.DatabaseSyncedDatabaseTable[key] = vout.AsAny() return nil diff --git a/bundle/deploy/terraform/tfdyn/convert_volume_test.go b/bundle/deploy/terraform/tfdyn/convert_volume_test.go index 9e76b0af62..92c64212b9 100644 --- a/bundle/deploy/terraform/tfdyn/convert_volume_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_volume_test.go @@ -72,30 +72,3 @@ func TestConvertVolume(t *testing.T) { }, }, out.Grants["volume_my_volume"]) } - -func TestConvertVolumeWithLifecycle(t *testing.T) { - src := resources.Volume{ - CreateVolumeRequestContent: catalog.CreateVolumeRequestContent{ - Name: "name", - }, - Lifecycle: resources.Lifecycle{ - PreventDestroy: true, - }, - } - - vin, err := convert.FromTyped(src, dyn.NilValue) - require.NoError(t, err) - - ctx := context.Background() - out := schema.NewResources() - err = volumeConverter{}.Convert(ctx, "my_volume", vin, out) - require.NoError(t, err) - - // Assert 
equality on the volume - assert.Equal(t, map[string]any{ - "name": "name", - "lifecycle": map[string]any{ - "prevent_destroy": true, - }, - }, out.Volume["my_volume"]) -} diff --git a/bundle/phases/plan.go b/bundle/phases/plan.go index 5931cd8fc9..61f141a48c 100644 --- a/bundle/phases/plan.go +++ b/bundle/phases/plan.go @@ -67,7 +67,7 @@ func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action, isDes preventDestroyV := lifecycleV.Get("prevent_destroy") preventDestroy, ok := preventDestroyV.AsBool() if ok && preventDestroy { - return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy", action.Key) + return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for %s.%s ", action.Key, action.Group, action.Key) } } } diff --git a/bundle/phases/plan_test.go b/bundle/phases/plan_test.go new file mode 100644 index 0000000000..3091073bfc --- /dev/null +++ b/bundle/phases/plan_test.go @@ -0,0 +1,64 @@ +package phases + +import ( + "context" + "fmt" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/deployplan" + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/require" +) + +func TestCheckPreventDestroyForAllResources(t *testing.T) { + supportedResources := config.SupportedResources() + + for resourceType := range supportedResources { + t.Run(resourceType, func(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Name: "test", + }, + Resources: config.Resources{}, + }, + } + + ctx := context.Background() + bundle.ApplyFuncContext(ctx, b, func(ctx context.Context, b *bundle.Bundle) { + // Use Mutate to set the configuration dynamically + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + // Set the resource with lifecycle.prevent_destroy = true + return dyn.Set(v, "resources", dyn.NewValue(map[string]dyn.Value{ + resourceType: dyn.NewValue(map[string]dyn.Value{ + "test_resource": dyn.NewValue(map[string]dyn.Value{ + "lifecycle": dyn.NewValue(map[string]dyn.Value{ + "prevent_destroy": dyn.NewValue(true, nil), + }, nil), + }, nil), + }, nil), + }, nil)) + }) + require.NoError(t, err) + }) + + actions := []deployplan.Action{ + { + ResourceNode: deployplan.ResourceNode{ + Group: resourceType, + Key: "test_resource", + }, + ActionType: deployplan.ActionTypeRecreate, + }, + } + + err := checkForPreventDestroy(b, actions, false) + require.Error(t, err) + require.Contains(t, err.Error(), "resource test_resource has lifecycle.prevent_destroy set") + require.Contains(t, err.Error(), "but the plan calls for this resource to be recreated or destroyed") + require.Contains(t, err.Error(), fmt.Sprintf("disable lifecycle.prevent_destroy for %s.test_resource", resourceType)) + }) + } +} From 1f67f4fd1378a0cd47a61664a4a4112a5329cb05 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 8 Sep 2025 13:07:03 +0200 Subject: [PATCH 11/19] fixed schema --- bundle/internal/schema/annotations.yml | 7 +++++++ bundle/schema/jsonschema.json | 6 ++++++ 2 files changed, 13 insertions(+) diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 3fbf95e8ba..51b803274e 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -478,6 
+478,10 @@ github.com/databricks/cli/bundle/config/resources.DashboardPermission: "user_name": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.DatabaseCatalog: + "lifecycle": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermission: "group_name": "description": |- @@ -647,6 +651,9 @@ github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable: "effective_logical_database_name": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER "logical_database_name": "description": |- PLACEHOLDER diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index fdfb93c185..0c43bee124 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -534,6 +534,9 @@ "description": "The name of the database (in a instance) associated with the catalog.", "$ref": "#/$defs/string" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "name": { "description": "The name of the catalog in UC.", "$ref": "#/$defs/string" @@ -1857,6 +1860,9 @@ "effective_logical_database_name": { "$ref": "#/$defs/string" }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, "logical_database_name": { "$ref": "#/$defs/string" }, From 3181033119a2532baae3e8cb142ce4251405aa15 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 8 Sep 2025 13:10:29 +0200 Subject: [PATCH 12/19] whitespaces --- .../bundle/lifecycle/prevent-destroy/out.direct-exp.txt | 4 ++-- bundle/phases/plan.go | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt b/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt index cdde72912c..7c9c750fef 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt +++ b/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt @@ -2,7 +2,7 @@ >>> errcode [CLI] bundle plan >>> musterr [CLI] bundle destroy --auto-approve -Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for pipelines.my_pipelines +Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for pipelines.my_pipelines Exit code (musterr): 1 @@ -12,7 +12,7 @@ recreate pipelines.my_pipelines >>> musterr [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... -Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for pipelines.my_pipelines +Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. 
To avoid this error, disable lifecycle.prevent_destroy for pipelines.my_pipelines Exit code (musterr): 1 diff --git a/bundle/phases/plan.go b/bundle/phases/plan.go index 61f141a48c..56fd5e73d2 100644 --- a/bundle/phases/plan.go +++ b/bundle/phases/plan.go @@ -67,7 +67,7 @@ func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action, isDes preventDestroyV := lifecycleV.Get("prevent_destroy") preventDestroy, ok := preventDestroyV.AsBool() if ok && preventDestroy { - return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for %s.%s ", action.Key, action.Group, action.Key) + return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for %s.%s", action.Key, action.Group, action.Key) } } } From a32e69ac27022723a37ff85d17a3c32c3c42b813 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 8 Sep 2025 13:14:14 +0200 Subject: [PATCH 13/19] codegen python --- .../databricks/bundles/schemas/__init__.py | 8 +++++ .../bundles/schemas/_models/lifecycle.py | 32 +++++++++++++++++++ .../bundles/schemas/_models/schema.py | 5 +++ 3 files changed, 45 insertions(+) create mode 100644 experimental/python/databricks/bundles/schemas/_models/lifecycle.py diff --git a/experimental/python/databricks/bundles/schemas/__init__.py b/experimental/python/databricks/bundles/schemas/__init__.py index 69e9d4a279..d4d0fa33a3 100644 --- a/experimental/python/databricks/bundles/schemas/__init__.py +++ b/experimental/python/databricks/bundles/schemas/__init__.py @@ -1,4 +1,7 @@ __all__ = [ + "Lifecycle", + "LifecycleDict", + "LifecycleParam", "Schema", "SchemaDict", "SchemaGrant", @@ -10,6 +13,11 @@ ] +from databricks.bundles.schemas._models.lifecycle import ( + Lifecycle, + LifecycleDict, + LifecycleParam, +) from databricks.bundles.schemas._models.schema import Schema, SchemaDict, SchemaParam from databricks.bundles.schemas._models.schema_grant import ( SchemaGrant, diff --git a/experimental/python/databricks/bundles/schemas/_models/lifecycle.py b/experimental/python/databricks/bundles/schemas/_models/lifecycle.py new file mode 100644 index 0000000000..a71b4e0be7 --- /dev/null +++ b/experimental/python/databricks/bundles/schemas/_models/lifecycle.py @@ -0,0 +1,32 @@ +from dataclasses import dataclass +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import VariableOrOptional + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class Lifecycle: + """""" + + prevent_destroy: VariableOrOptional[bool] = None + + @classmethod + def from_dict(cls, value: "LifecycleDict") -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "LifecycleDict": + return _transform_to_json_value(self) # type:ignore + + +class LifecycleDict(TypedDict, total=False): + """""" + + prevent_destroy: VariableOrOptional[bool] + + +LifecycleParam = LifecycleDict | Lifecycle diff --git a/experimental/python/databricks/bundles/schemas/_models/schema.py b/experimental/python/databricks/bundles/schemas/_models/schema.py index f3de56b1e8..d97415fa15 100644 --- a/experimental/python/databricks/bundles/schemas/_models/schema.py +++ 
b/experimental/python/databricks/bundles/schemas/_models/schema.py @@ -10,6 +10,7 @@ VariableOrList, VariableOrOptional, ) +from databricks.bundles.schemas._models.lifecycle import Lifecycle, LifecycleParam from databricks.bundles.schemas._models.schema_grant import ( SchemaGrant, SchemaGrantParam, @@ -40,6 +41,8 @@ class Schema(Resource): grants: VariableOrList[SchemaGrant] = field(default_factory=list) + lifecycle: VariableOrOptional[Lifecycle] = None + properties: VariableOrDict[str] = field(default_factory=dict) storage_root: VariableOrOptional[str] = None @@ -75,6 +78,8 @@ class SchemaDict(TypedDict, total=False): grants: VariableOrList[SchemaGrantParam] + lifecycle: VariableOrOptional[LifecycleParam] + properties: VariableOrDict[str] storage_root: VariableOrOptional[str] From 109d45361daf1c01bcd2e687350e60cfc8b96d4d Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 8 Sep 2025 14:43:14 +0200 Subject: [PATCH 14/19] fixes --- .../bundle/lifecycle/prevent-destroy/script | 2 +- bundle/config/resources/lifecycle.go | 3 ++ bundle/internal/schema/annotations.yml | 10 ++---- .../schema/annotations_openapi_overrides.yml | 31 ++++++++++--------- bundle/phases/plan.go | 30 ++++++++++-------- bundle/schema/jsonschema.json | 18 +++++++++++ 6 files changed, 59 insertions(+), 35 deletions(-) diff --git a/acceptance/bundle/lifecycle/prevent-destroy/script b/acceptance/bundle/lifecycle/prevent-destroy/script index 3f8e8f440f..9b018d0372 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/script +++ b/acceptance/bundle/lifecycle/prevent-destroy/script @@ -16,7 +16,7 @@ trace errcode $CLI bundle plan >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 trace $CLI bundle deploy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 update_file.py databricks.yml 'prevent_destroy: false' 'prevent_destroy: true' -# Removing the job, deploy must succeed +# Removing the pipeline, deploy must succeed update_file.py databricks.yml '<<: *pipeline_base' '' trace errcode $CLI bundle plan >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 trace $CLI bundle deploy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 diff --git a/bundle/config/resources/lifecycle.go b/bundle/config/resources/lifecycle.go index 055b4d4d36..c3de7ce8ea 100644 --- a/bundle/config/resources/lifecycle.go +++ b/bundle/config/resources/lifecycle.go @@ -1,5 +1,8 @@ package resources +// Lifecycle is a struct that contains the lifecycle settings for a resource. +// It controls the behavior of the resource when it is deployed or destroyed. type Lifecycle struct { + // Lifecycle setting to prevent the resource from being destroyed. PreventDestroy bool `json:"prevent_destroy,omitempty"` } diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 51b803274e..08d072226f 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -478,10 +478,6 @@ github.com/databricks/cli/bundle/config/resources.DashboardPermission: "user_name": "description": |- PLACEHOLDER -github.com/databricks/cli/bundle/config/resources.DatabaseCatalog: - "lifecycle": - "description": |- - PLACEHOLDER github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermission: "group_name": "description": |- @@ -518,7 +514,7 @@ github.com/databricks/cli/bundle/config/resources.JobPermission: github.com/databricks/cli/bundle/config/resources.Lifecycle: "prevent_destroy": "description": |- - PLACEHOLDER + Lifecycle setting to prevent the resource from being destroyed. 
github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission: "group_name": "description": |- @@ -605,7 +601,7 @@ github.com/databricks/cli/bundle/config/resources.SecretScope: The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT` "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "name": "description": |- Scope name requested by the user. Scope names are unique. @@ -653,7 +649,7 @@ github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable: PLACEHOLDER "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "logical_database_name": "description": |- PLACEHOLDER diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml index 680b15b660..35593f2e7f 100644 --- a/bundle/internal/schema/annotations_openapi_overrides.yml +++ b/bundle/internal/schema/annotations_openapi_overrides.yml @@ -16,7 +16,7 @@ github.com/databricks/cli/bundle/config/resources.App: PLACEHOLDER "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "oauth2_app_client_id": "description": |- PLACEHOLDER @@ -89,7 +89,7 @@ github.com/databricks/cli/bundle/config/resources.Cluster: PLACEHOLDER "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "permissions": "description": |- PLACEHOLDER @@ -148,7 +148,7 @@ github.com/databricks/cli/bundle/config/resources.Dashboard: PLACEHOLDER "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "lifecycle_state": "description": |- The state of the dashboard resource. Used for tracking trashed status. @@ -195,13 +195,16 @@ github.com/databricks/cli/bundle/config/resources.DatabaseCatalog: "create_database_if_not_exists": "description": |- PLACEHOLDER + "lifecycle": + "description": |- + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "uid": "description": |- PLACEHOLDER github.com/databricks/cli/bundle/config/resources.DatabaseInstance: "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "permissions": "description": |- PLACEHOLDER @@ -238,7 +241,7 @@ github.com/databricks/cli/bundle/config/resources.Job: PLACEHOLDER "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "permissions": "description": |- PLACEHOLDER @@ -275,7 +278,7 @@ github.com/databricks/cli/bundle/config/resources.MlflowExperiment: ``` "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. 
It controls the behavior of the resource when it is deployed or destroyed. "permissions": "description": |- PLACEHOLDER @@ -294,7 +297,7 @@ github.com/databricks/cli/bundle/config/resources.MlflowModel: The model resource allows you to define [legacy models](/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use Unity Catalog [registered models](#registered-model) instead. "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "permissions": "description": |- PLACEHOLDER @@ -342,7 +345,7 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint: PLACEHOLDER "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "permissions": "description": |- PLACEHOLDER @@ -384,7 +387,7 @@ github.com/databricks/cli/bundle/config/resources.Pipeline: PLACEHOLDER "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "permissions": "description": |- PLACEHOLDER @@ -435,7 +438,7 @@ github.com/databricks/cli/bundle/config/resources.QualityMonitor: PLACEHOLDER "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "table_name": "description": |- PLACEHOLDER @@ -464,7 +467,7 @@ github.com/databricks/cli/bundle/config/resources.RegisteredModel: PLACEHOLDER "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. github.com/databricks/cli/bundle/config/resources.Schema: "_": "markdown_description": |- @@ -517,7 +520,7 @@ github.com/databricks/cli/bundle/config/resources.Schema: PLACEHOLDER "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "properties": "description": |- PLACEHOLDER @@ -567,7 +570,7 @@ github.com/databricks/cli/bundle/config/resources.SqlWarehouse: Defaults to true. "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. "permissions": "description": |- PLACEHOLDER @@ -614,7 +617,7 @@ github.com/databricks/cli/bundle/config/resources.Volume: PLACEHOLDER "lifecycle": "description": |- - PLACEHOLDER + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. 
"volume_type": "description": |- PLACEHOLDER diff --git a/bundle/phases/plan.go b/bundle/phases/plan.go index 56fd5e73d2..b93c743f14 100644 --- a/bundle/phases/plan.go +++ b/bundle/phases/plan.go @@ -55,20 +55,24 @@ func deployPrepare(ctx context.Context, b *bundle.Bundle) map[string][]libraries func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action, isDestroy bool) error { root := b.Config.Value() for _, action := range actions { - if action.ActionType == deployplan.ActionTypeRecreate || (isDestroy && action.ActionType == deployplan.ActionTypeDelete) { - path := dyn.NewPath(dyn.Key("resources"), dyn.Key(action.Group), dyn.Key(action.Key), dyn.Key("lifecycle")) - lifecycleV, err := dyn.GetByPath(root, path) - // If there is no lifecycle, skip - if err != nil { - return nil - } + // If the action is not a recreate or a delete as part of destroy - skip checking for prevent destroy + // We allow delete as part of deploy though (hence isDestroy check) because we mimic the behavior of terraform which allows such resources to be removed from config. + if action.ActionType != deployplan.ActionTypeRecreate && !(isDestroy && action.ActionType == deployplan.ActionTypeDelete) { + continue + } + + path := dyn.NewPath(dyn.Key("resources"), dyn.Key(action.Group), dyn.Key(action.Key), dyn.Key("lifecycle")) + lifecycleV, err := dyn.GetByPath(root, path) + // If there is no lifecycle, skip + if err != nil { + return nil + } - if lifecycleV.Kind() == dyn.KindMap { - preventDestroyV := lifecycleV.Get("prevent_destroy") - preventDestroy, ok := preventDestroyV.AsBool() - if ok && preventDestroy { - return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for %s.%s", action.Key, action.Group, action.Key) - } + if lifecycleV.Kind() == dyn.KindMap { + preventDestroyV := lifecycleV.Get("prevent_destroy") + preventDestroy, ok := preventDestroyV.AsBool() + if ok && preventDestroy { + return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for %s.%s", action.Key, action.Group, action.Key) } } } diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 0c43bee124..4a5200be0c 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -108,6 +108,7 @@ "$ref": "#/$defs/string" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { @@ -295,6 +296,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Kind" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "node_type_id": { @@ -433,6 +435,7 @@ "$ref": "#/$defs/string" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. 
It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "lifecycle_state": { @@ -535,6 +538,7 @@ "$ref": "#/$defs/string" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { @@ -609,6 +613,7 @@ "$ref": "#/$defs/bool" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { @@ -774,6 +779,7 @@ "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobCluster" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "max_concurrent_runs": { @@ -898,6 +904,7 @@ "type": "object", "properties": { "prevent_destroy": { + "description": "Lifecycle setting to prevent the resource from being destroyed.", "$ref": "#/$defs/bool" } }, @@ -932,6 +939,7 @@ "$ref": "#/$defs/int64" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "lifecycle_stage": { @@ -1014,6 +1022,7 @@ "$ref": "#/$defs/string" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { @@ -1112,6 +1121,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EmailNotifications" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { @@ -1300,6 +1310,7 @@ "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { @@ -1447,6 +1458,7 @@ "$ref": "#/$defs/string" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "notifications": { @@ -1516,6 +1528,7 @@ "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Grant" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. 
It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { @@ -1562,6 +1575,7 @@ "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SchemaGrant" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { @@ -1653,6 +1667,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { @@ -1760,6 +1775,7 @@ "deprecated": true }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "max_num_clusters": { @@ -1861,6 +1877,7 @@ "$ref": "#/$defs/string" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "logical_database_name": { @@ -1904,6 +1921,7 @@ "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.VolumeGrant" }, "lifecycle": { + "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. 
It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { From db34f1075460b6531241660f64259d420568d89a Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 9 Sep 2025 13:42:49 +0200 Subject: [PATCH 15/19] fixes --- acceptance/bundle/refschema/out.fields.txt | 18 ++++++++++++++++++ bundle/config/resources/apps.go | 4 +--- bundle/config/resources/base.go | 1 + bundle/config/resources/clusters.go | 4 +--- bundle/config/resources/dashboard.go | 5 ++--- bundle/config/resources/database_catalog.go | 2 -- bundle/config/resources/database_instance.go | 4 +--- bundle/config/resources/job.go | 4 +--- bundle/config/resources/mlflow_experiment.go | 4 +--- bundle/config/resources/mlflow_model.go | 4 +--- .../config/resources/model_serving_endpoint.go | 10 ++++------ bundle/config/resources/pipeline.go | 5 ++--- bundle/config/resources/quality_monitor.go | 7 +++---- bundle/config/resources/registered_model.go | 9 ++++----- bundle/config/resources/schema.go | 5 +---- bundle/config/resources/secret_scope.go | 2 -- bundle/config/resources/sql_warehouses.go | 4 +--- .../config/resources/synced_database_table.go | 2 -- bundle/config/resources/volume.go | 4 +--- bundle/phases/plan.go | 2 +- 20 files changed, 44 insertions(+), 56 deletions(-) diff --git a/acceptance/bundle/refschema/out.fields.txt b/acceptance/bundle/refschema/out.fields.txt index 77ef1eb6f9..940f91f073 100644 --- a/acceptance/bundle/refschema/out.fields.txt +++ b/acceptance/bundle/refschema/out.fields.txt @@ -27,6 +27,8 @@ resources.apps.*.effective_budget_policy_id string ALL resources.apps.*.effective_user_api_scopes []string ALL resources.apps.*.effective_user_api_scopes[*] string ALL resources.apps.*.id string ALL +resources.apps.*.lifecycle resources.Lifecycle INPUT +resources.apps.*.lifecycle.prevent_destroy bool INPUT resources.apps.*.modified_status string INPUT resources.apps.*.name string ALL resources.apps.*.oauth2_app_client_id string ALL @@ -87,6 +89,8 @@ resources.database_catalogs.*.create_database_if_not_exists bool ALL resources.database_catalogs.*.database_instance_name string ALL resources.database_catalogs.*.database_name string ALL resources.database_catalogs.*.id string INPUT +resources.database_catalogs.*.lifecycle resources.Lifecycle INPUT +resources.database_catalogs.*.lifecycle.prevent_destroy bool INPUT resources.database_catalogs.*.modified_status string INPUT resources.database_catalogs.*.name string ALL resources.database_catalogs.*.uid string ALL @@ -109,6 +113,8 @@ resources.database_instances.*.effective_stopped bool ALL resources.database_instances.*.enable_pg_native_login bool ALL resources.database_instances.*.enable_readable_secondaries bool ALL resources.database_instances.*.id string INPUT +resources.database_instances.*.lifecycle resources.Lifecycle INPUT +resources.database_instances.*.lifecycle.prevent_destroy bool INPUT resources.database_instances.*.modified_status string INPUT resources.database_instances.*.name string ALL resources.database_instances.*.node_count int ALL @@ -294,6 +300,8 @@ resources.jobs.*.job_clusters[*].new_cluster.workload_type.clients compute.Clien resources.jobs.*.job_clusters[*].new_cluster.workload_type.clients.jobs bool INPUT STATE resources.jobs.*.job_clusters[*].new_cluster.workload_type.clients.notebooks bool INPUT STATE resources.jobs.*.job_id int64 REMOTE +resources.jobs.*.lifecycle resources.Lifecycle INPUT +resources.jobs.*.lifecycle.prevent_destroy 
bool INPUT resources.jobs.*.max_concurrent_runs int INPUT STATE resources.jobs.*.modified_status string INPUT resources.jobs.*.name string INPUT STATE @@ -2082,6 +2090,8 @@ resources.pipelines.*.libraries[*].maven.repo string INPUT STATE resources.pipelines.*.libraries[*].notebook *pipelines.NotebookLibrary INPUT STATE resources.pipelines.*.libraries[*].notebook.path string INPUT STATE resources.pipelines.*.libraries[*].whl string INPUT STATE +resources.pipelines.*.lifecycle resources.Lifecycle INPUT +resources.pipelines.*.lifecycle.prevent_destroy bool INPUT resources.pipelines.*.modified_status string INPUT resources.pipelines.*.name string ALL resources.pipelines.*.notifications []pipelines.Notifications INPUT STATE @@ -2389,6 +2399,8 @@ resources.schemas.*.grants[*].principal string INPUT resources.schemas.*.grants[*].privileges []resources.SchemaGrantPrivilege INPUT resources.schemas.*.grants[*].privileges[*] resources.SchemaGrantPrivilege INPUT resources.schemas.*.id string INPUT +resources.schemas.*.lifecycle resources.Lifecycle INPUT +resources.schemas.*.lifecycle.prevent_destroy bool INPUT resources.schemas.*.metastore_id string REMOTE resources.schemas.*.modified_status string INPUT resources.schemas.*.name string ALL @@ -2422,6 +2434,8 @@ resources.sql_warehouses.*.health.summary string REMOTE resources.sql_warehouses.*.id string INPUT REMOTE resources.sql_warehouses.*.instance_profile_arn string ALL resources.sql_warehouses.*.jdbc_url string REMOTE +resources.sql_warehouses.*.lifecycle resources.Lifecycle INPUT +resources.sql_warehouses.*.lifecycle.prevent_destroy bool INPUT resources.sql_warehouses.*.max_num_clusters int ALL resources.sql_warehouses.*.min_num_clusters int ALL resources.sql_warehouses.*.modified_status string INPUT @@ -2494,6 +2508,8 @@ resources.synced_database_tables.*.database_instance_name string ALL resources.synced_database_tables.*.effective_database_instance_name string ALL resources.synced_database_tables.*.effective_logical_database_name string ALL resources.synced_database_tables.*.id string INPUT +resources.synced_database_tables.*.lifecycle resources.Lifecycle INPUT +resources.synced_database_tables.*.lifecycle.prevent_destroy bool INPUT resources.synced_database_tables.*.logical_database_name string ALL resources.synced_database_tables.*.modified_status string INPUT resources.synced_database_tables.*.name string ALL @@ -2527,6 +2543,8 @@ resources.volumes.*.grants[*].principal string INPUT resources.volumes.*.grants[*].privileges []resources.VolumeGrantPrivilege INPUT resources.volumes.*.grants[*].privileges[*] resources.VolumeGrantPrivilege INPUT resources.volumes.*.id string INPUT +resources.volumes.*.lifecycle resources.Lifecycle INPUT +resources.volumes.*.lifecycle.prevent_destroy bool INPUT resources.volumes.*.metastore_id string REMOTE resources.volumes.*.modified_status string INPUT resources.volumes.*.name string ALL diff --git a/bundle/config/resources/apps.go b/bundle/config/resources/apps.go index de9028f625..177c242e5b 100644 --- a/bundle/config/resources/apps.go +++ b/bundle/config/resources/apps.go @@ -24,6 +24,7 @@ type AppPermission struct { type App struct { BaseResource + apps.App // nolint App struct also defines Id and URL field with the same json tag "id" and "url" // SourceCodePath is a required field used by DABs to point to Databricks app source code // on local disk and to the corresponding workspace path during app deployment. 
@@ -36,9 +37,6 @@ type App struct { Config map[string]any `json:"config,omitempty"` Permissions []AppPermission `json:"permissions,omitempty"` - - Lifecycle Lifecycle `json:"lifecycle,omitempty"` - apps.App // nolint App struct also defines Id and URL field with the same json tag "id" and "url" } func (a *App) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/base.go b/bundle/config/resources/base.go index 31996d9562..792db28972 100644 --- a/bundle/config/resources/base.go +++ b/bundle/config/resources/base.go @@ -5,4 +5,5 @@ type BaseResource struct { ID string `json:"id,omitempty" bundle:"readonly"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` URL string `json:"url,omitempty" bundle:"internal"` + Lifecycle Lifecycle `json:"lifecycle,omitempty"` } diff --git a/bundle/config/resources/clusters.go b/bundle/config/resources/clusters.go index 2e05ffe50d..a604dc3ee0 100644 --- a/bundle/config/resources/clusters.go +++ b/bundle/config/resources/clusters.go @@ -24,11 +24,9 @@ type ClusterPermission struct { type Cluster struct { BaseResource + compute.ClusterSpec Permissions []ClusterPermission `json:"permissions,omitempty"` - - compute.ClusterSpec - Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *Cluster) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/dashboard.go b/bundle/config/resources/dashboard.go index 8cf272bc8e..ac88d801c5 100644 --- a/bundle/config/resources/dashboard.go +++ b/bundle/config/resources/dashboard.go @@ -46,10 +46,9 @@ type DashboardConfig struct { type Dashboard struct { BaseResource - Permissions []DashboardPermission `json:"permissions,omitempty"` - DashboardConfig - Lifecycle Lifecycle `json:"lifecycle,omitempty"` + + Permissions []DashboardPermission `json:"permissions,omitempty"` // FilePath points to the local `.lvdash.json` file containing the dashboard definition. // This is inlined into serialized_dashboard during deployment. 
The file_path is kept around diff --git a/bundle/config/resources/database_catalog.go b/bundle/config/resources/database_catalog.go index dfacdcdcad..0de0fa4571 100644 --- a/bundle/config/resources/database_catalog.go +++ b/bundle/config/resources/database_catalog.go @@ -12,9 +12,7 @@ import ( type DatabaseCatalog struct { BaseResource - database.DatabaseCatalog - Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (d *DatabaseCatalog) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) { diff --git a/bundle/config/resources/database_instance.go b/bundle/config/resources/database_instance.go index 6111a55f91..75ebb17a12 100644 --- a/bundle/config/resources/database_instance.go +++ b/bundle/config/resources/database_instance.go @@ -24,11 +24,9 @@ type DatabaseInstancePermission struct { type DatabaseInstance struct { BaseResource + database.DatabaseInstance Permissions []DatabaseInstancePermission `json:"permissions,omitempty"` - - database.DatabaseInstance - Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (d *DatabaseInstance) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) { diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index 8debbef54e..f3b8e9405e 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -25,11 +25,9 @@ type JobPermission struct { type Job struct { BaseResource + jobs.JobSettings Permissions []JobPermission `json:"permissions,omitempty"` - - jobs.JobSettings - Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (j *Job) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/mlflow_experiment.go b/bundle/config/resources/mlflow_experiment.go index bc2df5db10..8a9cc52d05 100644 --- a/bundle/config/resources/mlflow_experiment.go +++ b/bundle/config/resources/mlflow_experiment.go @@ -24,11 +24,9 @@ type MlflowExperimentPermission struct { type MlflowExperiment struct { BaseResource + ml.Experiment Permissions []MlflowExperimentPermission `json:"permissions,omitempty"` - - ml.Experiment - Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *MlflowExperiment) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/mlflow_model.go b/bundle/config/resources/mlflow_model.go index 8a6546ecba..ffa88f4d21 100644 --- a/bundle/config/resources/mlflow_model.go +++ b/bundle/config/resources/mlflow_model.go @@ -24,11 +24,9 @@ type MlflowModelPermission struct { type MlflowModel struct { BaseResource + ml.CreateModelRequest Permissions []MlflowModelPermission `json:"permissions,omitempty"` - - ml.CreateModelRequest - Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *MlflowModel) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/model_serving_endpoint.go b/bundle/config/resources/model_serving_endpoint.go index a7912788be..fc597db51c 100644 --- a/bundle/config/resources/model_serving_endpoint.go +++ b/bundle/config/resources/model_serving_endpoint.go @@ -23,17 +23,15 @@ type ModelServingEndpointPermission struct { } type ModelServingEndpoint struct { - Lifecycle Lifecycle `json:"lifecycle,omitempty"` - BaseResource - // This is a resource agnostic implementation of permissions for ACLs. - // Implementation could be different based on the resource type. 
- Permissions []ModelServingEndpointPermission `json:"permissions,omitempty"` - // This represents the input args for terraform, and will get converted // to a HCL representation for CRUD serving.CreateServingEndpoint + + // This is a resource agnostic implementation of permissions for ACLs. + // Implementation could be different based on the resource type. + Permissions []ModelServingEndpointPermission `json:"permissions,omitempty"` } func (s *ModelServingEndpoint) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/pipeline.go b/bundle/config/resources/pipeline.go index ffd1e1dd51..b287dc0a12 100644 --- a/bundle/config/resources/pipeline.go +++ b/bundle/config/resources/pipeline.go @@ -24,10 +24,9 @@ type PipelinePermission struct { type Pipeline struct { BaseResource - Permissions []PipelinePermission `json:"permissions,omitempty"` - - Lifecycle Lifecycle `json:"lifecycle,omitempty"` pipelines.CreatePipeline //nolint CreatePipeline also defines Id field with the same json tag "id" + + Permissions []PipelinePermission `json:"permissions,omitempty"` } func (p *Pipeline) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/quality_monitor.go b/bundle/config/resources/quality_monitor.go index a62231c848..f373676d45 100644 --- a/bundle/config/resources/quality_monitor.go +++ b/bundle/config/resources/quality_monitor.go @@ -14,12 +14,11 @@ import ( type QualityMonitor struct { BaseResource - // The table name is a required field but not included as a JSON field in [catalog.CreateMonitor]. - TableName string `json:"table_name"` - // This struct defines the creation payload for a monitor. catalog.CreateMonitor - Lifecycle Lifecycle `json:"lifecycle,omitempty"` + + // The table name is a required field but not included as a JSON field in [catalog.CreateMonitor]. + TableName string `json:"table_name"` } func (s *QualityMonitor) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/registered_model.go b/bundle/config/resources/registered_model.go index 326d1d63fa..c8d82d08f1 100644 --- a/bundle/config/resources/registered_model.go +++ b/bundle/config/resources/registered_model.go @@ -14,14 +14,13 @@ import ( type RegisteredModel struct { BaseResource - // This is a resource agnostic implementation of grants. - // Implementation could be different based on the resource type. - Grants []Grant `json:"grants,omitempty"` - // This represents the input args for terraform, and will get converted // to a HCL representation for CRUD catalog.CreateRegisteredModelRequest - Lifecycle Lifecycle `json:"lifecycle,omitempty"` + + // This is a resource agnostic implementation of grants. + // Implementation could be different based on the resource type. + Grants []Grant `json:"grants,omitempty"` } func (s *RegisteredModel) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/schema.go b/bundle/config/resources/schema.go index 574c2749bf..af49b5dbe2 100644 --- a/bundle/config/resources/schema.go +++ b/bundle/config/resources/schema.go @@ -61,12 +61,9 @@ type SchemaGrant struct { type Schema struct { BaseResource - + catalog.CreateSchema // List of grants to apply on this schema. 
Grants []SchemaGrant `json:"grants,omitempty"` - - catalog.CreateSchema - Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *Schema) Exists(ctx context.Context, w *databricks.WorkspaceClient, fullName string) (bool, error) { diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go index 43180355cd..10c3bfb7d9 100644 --- a/bundle/config/resources/secret_scope.go +++ b/bundle/config/resources/secret_scope.go @@ -41,8 +41,6 @@ type SecretScope struct { BackendType workspace.ScopeBackendType `json:"backend_type,omitempty"` // The metadata for the secret scope if the type is `AZURE_KEYVAULT` KeyvaultMetadata *workspace.AzureKeyVaultSecretScopeMetadata `json:"keyvault_metadata,omitempty"` - - Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *SecretScope) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/sql_warehouses.go b/bundle/config/resources/sql_warehouses.go index 8ddf96ea16..53302369b8 100644 --- a/bundle/config/resources/sql_warehouses.go +++ b/bundle/config/resources/sql_warehouses.go @@ -22,11 +22,9 @@ type SqlWarehousePermission struct { type SqlWarehouse struct { BaseResource + sql.CreateWarehouseRequest Permissions []SqlWarehousePermission `json:"permissions,omitempty"` - - sql.CreateWarehouseRequest - Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (sw *SqlWarehouse) UnmarshalJSON(b []byte) error { diff --git a/bundle/config/resources/synced_database_table.go b/bundle/config/resources/synced_database_table.go index f79303954d..5577fe47b0 100644 --- a/bundle/config/resources/synced_database_table.go +++ b/bundle/config/resources/synced_database_table.go @@ -12,9 +12,7 @@ import ( type SyncedDatabaseTable struct { BaseResource - database.SyncedDatabaseTable - Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (s *SyncedDatabaseTable) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) { diff --git a/bundle/config/resources/volume.go b/bundle/config/resources/volume.go index ba8a063f1c..8c47a6afc4 100644 --- a/bundle/config/resources/volume.go +++ b/bundle/config/resources/volume.go @@ -42,12 +42,10 @@ type VolumeGrant struct { type Volume struct { BaseResource + catalog.CreateVolumeRequestContent // List of grants to apply on this volume. Grants []VolumeGrant `json:"grants,omitempty"` - - catalog.CreateVolumeRequestContent - Lifecycle Lifecycle `json:"lifecycle,omitempty"` } func (v *Volume) UnmarshalJSON(b []byte) error { diff --git a/bundle/phases/plan.go b/bundle/phases/plan.go index b93c743f14..74f7e7ef17 100644 --- a/bundle/phases/plan.go +++ b/bundle/phases/plan.go @@ -57,7 +57,7 @@ func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action, isDes for _, action := range actions { // If the action is not a recreate or a delete as part of destroy - skip checking for prevent destroy // We allow delete as part of deploy though (hence isDestroy check) because we mimic the behavior of terraform which allows such resources to be removed from config. 
- if action.ActionType != deployplan.ActionTypeRecreate && !(isDestroy && action.ActionType == deployplan.ActionTypeDelete) { + if action.ActionType != deployplan.ActionTypeRecreate && (!isDestroy || action.ActionType != deployplan.ActionTypeDelete) { continue } From 9118cebd9796d63656e07febeeca7bd6d80a860e Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 9 Sep 2025 13:45:49 +0200 Subject: [PATCH 16/19] regenerate python --- experimental/python/databricks/bundles/jobs/_models/job.py | 6 ++++++ .../python/databricks/bundles/jobs/_models/lifecycle.py | 6 ++++++ .../databricks/bundles/pipelines/_models/lifecycle.py | 6 ++++++ .../python/databricks/bundles/pipelines/_models/pipeline.py | 6 ++++++ .../python/databricks/bundles/schemas/_models/lifecycle.py | 6 ++++++ .../python/databricks/bundles/schemas/_models/schema.py | 6 ++++++ .../python/databricks/bundles/volumes/_models/lifecycle.py | 6 ++++++ .../python/databricks/bundles/volumes/_models/volume.py | 6 ++++++ 8 files changed, 48 insertions(+) diff --git a/experimental/python/databricks/bundles/jobs/_models/job.py b/experimental/python/databricks/bundles/jobs/_models/job.py index 8ba6d20d92..a751c6c43e 100644 --- a/experimental/python/databricks/bundles/jobs/_models/job.py +++ b/experimental/python/databricks/bundles/jobs/_models/job.py @@ -118,6 +118,9 @@ class Job(Resource): """ lifecycle: VariableOrOptional[Lifecycle] = None + """ + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. + """ max_concurrent_runs: VariableOrOptional[int] = None """ @@ -260,6 +263,9 @@ class JobDict(TypedDict, total=False): """ lifecycle: VariableOrOptional[LifecycleParam] + """ + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. + """ max_concurrent_runs: VariableOrOptional[int] """ diff --git a/experimental/python/databricks/bundles/jobs/_models/lifecycle.py b/experimental/python/databricks/bundles/jobs/_models/lifecycle.py index a71b4e0be7..c934967f37 100644 --- a/experimental/python/databricks/bundles/jobs/_models/lifecycle.py +++ b/experimental/python/databricks/bundles/jobs/_models/lifecycle.py @@ -14,6 +14,9 @@ class Lifecycle: """""" prevent_destroy: VariableOrOptional[bool] = None + """ + Lifecycle setting to prevent the resource from being destroyed. + """ @classmethod def from_dict(cls, value: "LifecycleDict") -> "Self": @@ -27,6 +30,9 @@ class LifecycleDict(TypedDict, total=False): """""" prevent_destroy: VariableOrOptional[bool] + """ + Lifecycle setting to prevent the resource from being destroyed. + """ LifecycleParam = LifecycleDict | Lifecycle diff --git a/experimental/python/databricks/bundles/pipelines/_models/lifecycle.py b/experimental/python/databricks/bundles/pipelines/_models/lifecycle.py index a71b4e0be7..c934967f37 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/lifecycle.py +++ b/experimental/python/databricks/bundles/pipelines/_models/lifecycle.py @@ -14,6 +14,9 @@ class Lifecycle: """""" prevent_destroy: VariableOrOptional[bool] = None + """ + Lifecycle setting to prevent the resource from being destroyed. + """ @classmethod def from_dict(cls, value: "LifecycleDict") -> "Self": @@ -27,6 +30,9 @@ class LifecycleDict(TypedDict, total=False): """""" prevent_destroy: VariableOrOptional[bool] + """ + Lifecycle setting to prevent the resource from being destroyed. 
+ """ LifecycleParam = LifecycleDict | Lifecycle diff --git a/experimental/python/databricks/bundles/pipelines/_models/pipeline.py b/experimental/python/databricks/bundles/pipelines/_models/pipeline.py index 90ae8356f1..919238e743 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/pipeline.py +++ b/experimental/python/databricks/bundles/pipelines/_models/pipeline.py @@ -148,6 +148,9 @@ class Pipeline(Resource): """ lifecycle: VariableOrOptional[Lifecycle] = None + """ + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. + """ name: VariableOrOptional[str] = None """ @@ -308,6 +311,9 @@ class PipelineDict(TypedDict, total=False): """ lifecycle: VariableOrOptional[LifecycleParam] + """ + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. + """ name: VariableOrOptional[str] """ diff --git a/experimental/python/databricks/bundles/schemas/_models/lifecycle.py b/experimental/python/databricks/bundles/schemas/_models/lifecycle.py index a71b4e0be7..c934967f37 100644 --- a/experimental/python/databricks/bundles/schemas/_models/lifecycle.py +++ b/experimental/python/databricks/bundles/schemas/_models/lifecycle.py @@ -14,6 +14,9 @@ class Lifecycle: """""" prevent_destroy: VariableOrOptional[bool] = None + """ + Lifecycle setting to prevent the resource from being destroyed. + """ @classmethod def from_dict(cls, value: "LifecycleDict") -> "Self": @@ -27,6 +30,9 @@ class LifecycleDict(TypedDict, total=False): """""" prevent_destroy: VariableOrOptional[bool] + """ + Lifecycle setting to prevent the resource from being destroyed. + """ LifecycleParam = LifecycleDict | Lifecycle diff --git a/experimental/python/databricks/bundles/schemas/_models/schema.py b/experimental/python/databricks/bundles/schemas/_models/schema.py index d97415fa15..58975f0474 100644 --- a/experimental/python/databricks/bundles/schemas/_models/schema.py +++ b/experimental/python/databricks/bundles/schemas/_models/schema.py @@ -42,6 +42,9 @@ class Schema(Resource): grants: VariableOrList[SchemaGrant] = field(default_factory=list) lifecycle: VariableOrOptional[Lifecycle] = None + """ + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. + """ properties: VariableOrDict[str] = field(default_factory=dict) @@ -79,6 +82,9 @@ class SchemaDict(TypedDict, total=False): grants: VariableOrList[SchemaGrantParam] lifecycle: VariableOrOptional[LifecycleParam] + """ + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. + """ properties: VariableOrDict[str] diff --git a/experimental/python/databricks/bundles/volumes/_models/lifecycle.py b/experimental/python/databricks/bundles/volumes/_models/lifecycle.py index a71b4e0be7..c934967f37 100644 --- a/experimental/python/databricks/bundles/volumes/_models/lifecycle.py +++ b/experimental/python/databricks/bundles/volumes/_models/lifecycle.py @@ -14,6 +14,9 @@ class Lifecycle: """""" prevent_destroy: VariableOrOptional[bool] = None + """ + Lifecycle setting to prevent the resource from being destroyed. 
+ """ @classmethod def from_dict(cls, value: "LifecycleDict") -> "Self": @@ -27,6 +30,9 @@ class LifecycleDict(TypedDict, total=False): """""" prevent_destroy: VariableOrOptional[bool] + """ + Lifecycle setting to prevent the resource from being destroyed. + """ LifecycleParam = LifecycleDict | Lifecycle diff --git a/experimental/python/databricks/bundles/volumes/_models/volume.py b/experimental/python/databricks/bundles/volumes/_models/volume.py index 20615ec27e..20132cca96 100644 --- a/experimental/python/databricks/bundles/volumes/_models/volume.py +++ b/experimental/python/databricks/bundles/volumes/_models/volume.py @@ -47,6 +47,9 @@ class Volume(Resource): grants: VariableOrList[VolumeGrant] = field(default_factory=list) lifecycle: VariableOrOptional[Lifecycle] = None + """ + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. + """ storage_location: VariableOrOptional[str] = None """ @@ -89,6 +92,9 @@ class VolumeDict(TypedDict, total=False): grants: VariableOrList[VolumeGrantParam] lifecycle: VariableOrOptional[LifecycleParam] + """ + Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. + """ storage_location: VariableOrOptional[str] """ From fe565f509378a72fb4115637ad116270b15c8fd6 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 9 Sep 2025 14:58:31 +0200 Subject: [PATCH 17/19] do not use isDestroy flag and read prevent_destroy directly --- bundle/phases/deploy.go | 2 +- bundle/phases/destroy.go | 2 +- bundle/phases/plan.go | 24 +++++++++++------------- bundle/phases/plan_test.go | 2 +- 4 files changed, 14 insertions(+), 16 deletions(-) diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index caf5a325e6..f2661c5a2d 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -52,7 +52,7 @@ func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) { return false, err } - err = checkForPreventDestroy(b, actions, false) + err = checkForPreventDestroy(b, actions) if err != nil { return false, err } diff --git a/bundle/phases/destroy.go b/bundle/phases/destroy.go index 633eed30aa..619cfeb9b5 100644 --- a/bundle/phases/destroy.go +++ b/bundle/phases/destroy.go @@ -65,7 +65,7 @@ func approvalForDestroy(ctx context.Context, b *bundle.Bundle) (bool, error) { return false, err } - err = checkForPreventDestroy(b, deleteActions, true) + err = checkForPreventDestroy(b, deleteActions) if err != nil { return false, err } diff --git a/bundle/phases/plan.go b/bundle/phases/plan.go index 74f7e7ef17..0b0ef06c1e 100644 --- a/bundle/phases/plan.go +++ b/bundle/phases/plan.go @@ -52,28 +52,26 @@ func deployPrepare(ctx context.Context, b *bundle.Bundle) map[string][]libraries // checkForPreventDestroy checks if the resource has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. // If it does, it returns an error. -func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action, isDestroy bool) error { +func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action) error { root := b.Config.Value() for _, action := range actions { - // If the action is not a recreate or a delete as part of destroy - skip checking for prevent destroy - // We allow delete as part of deploy though (hence isDestroy check) because we mimic the behavior of terraform which allows such resources to be removed from config. 
- if action.ActionType != deployplan.ActionTypeRecreate && (!isDestroy || action.ActionType != deployplan.ActionTypeDelete) { + if action.ActionType != deployplan.ActionTypeRecreate && action.ActionType != deployplan.ActionTypeDelete { continue } - path := dyn.NewPath(dyn.Key("resources"), dyn.Key(action.Group), dyn.Key(action.Key), dyn.Key("lifecycle")) - lifecycleV, err := dyn.GetByPath(root, path) - // If there is no lifecycle, skip + path := dyn.NewPath(dyn.Key("resources"), dyn.Key(action.Group), dyn.Key(action.Key), dyn.Key("lifecycle"), dyn.Key("prevent_destroy")) + // If there is no prevent_destroy, skip this action and keep checking the rest + preventDestroyV, err := dyn.GetByPath(root, path) if err != nil { - return nil + continue } - if lifecycleV.Kind() == dyn.KindMap { - preventDestroyV := lifecycleV.Get("prevent_destroy") - preventDestroy, ok := preventDestroyV.AsBool() - if ok && preventDestroy { - return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for %s.%s", action.Key, action.Group, action.Key) - } + preventDestroy, ok := preventDestroyV.AsBool() + if !ok { + return fmt.Errorf("internal error: prevent_destroy is not a boolean for %s.%s", action.Group, action.Key) + } + if preventDestroy { + return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for %s.%s", action.Key, action.Group, action.Key) } } return nil diff --git a/bundle/phases/plan_test.go b/bundle/phases/plan_test.go index 3091073bfc..c3ccc460ed 100644 --- a/bundle/phases/plan_test.go +++ b/bundle/phases/plan_test.go @@ -54,7 +54,7 @@ func TestCheckPreventDestroyForAllResources(t *testing.T) { }, } - err := checkForPreventDestroy(b, actions, false) + err := checkForPreventDestroy(b, actions) require.Error(t, err) require.Contains(t, err.Error(), "resource test_resource has lifecycle.prevent_destroy set") require.Contains(t, err.Error(), "but the plan calls for this resource to be recreated or destroyed") From 65bc5dd776ecceaf0a7223e15e52f5b3193b1bd5 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 9 Sep 2025 15:50:59 +0200 Subject: [PATCH 18/19] add schemas to test --- .../lifecycle/prevent-destroy/databricks.yml | 19 +---- .../prevent-destroy/out.direct-exp.txt | 19 ++++- .../prevent-destroy/out.terraform.txt | 73 ++++++++++++++++++- .../lifecycle/prevent-destroy/output.txt | 2 + .../prevent-destroy/resources/pipeline.yml | 11 +++ .../prevent-destroy/resources/schema.yml | 7 ++ .../bundle/lifecycle/prevent-destroy/script | 20 +++-- .../lifecycle/prevent-destroy/test.toml | 27 +++++++ 8 files changed, 153 insertions(+), 25 deletions(-) create mode 100644 acceptance/bundle/lifecycle/prevent-destroy/resources/pipeline.yml create mode 100644 acceptance/bundle/lifecycle/prevent-destroy/resources/schema.yml diff --git a/acceptance/bundle/lifecycle/prevent-destroy/databricks.yml b/acceptance/bundle/lifecycle/prevent-destroy/databricks.yml index a8eb696cf8..67f898bb53 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/databricks.yml +++ b/acceptance/bundle/lifecycle/prevent-destroy/databricks.yml @@ -1,20 +1,5 @@ bundle: name: prevent-destroy -lifecycle: &lifecycle_base - lifecycle: - prevent_destroy: true - -pipeline: &pipeline_base - resources: - pipelines: - my_pipelines: - name: "test-pipeline" - libraries: - - notebook: - path: "./test-notebook.py" - <<: *lifecycle_base - schema: 
test-schema - catalog: main - -<<: *pipeline_base +include: + - resources/*.yml diff --git a/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt b/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt index 7c9c750fef..474f0d4c50 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt +++ b/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt @@ -1,7 +1,15 @@ +>>> musterr [CLI] bundle destroy --auto-approve +Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for pipelines.my_pipelines + + +Exit code (musterr): 1 + >>> errcode [CLI] bundle plan +recreate pipelines.my_pipelines ->>> musterr [CLI] bundle destroy --auto-approve +>>> musterr [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for pipelines.my_pipelines @@ -9,6 +17,7 @@ Exit code (musterr): 1 >>> errcode [CLI] bundle plan recreate pipelines.my_pipelines +recreate schemas.my_schema >>> musterr [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... @@ -19,10 +28,14 @@ Exit code (musterr): 1 >>> errcode [CLI] bundle plan recreate pipelines.my_pipelines +recreate schemas.my_schema >>> [CLI] bundle deploy --auto-approve Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... +This action will result in the deletion or recreation of the following UC schemas. Any underlying data may be lost: + recreate schema my_schema + This action will result in the deletion or recreation of the following Lakeflow Declarative Pipelines along with the Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the pipelines will restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline @@ -34,10 +47,14 @@ Deployment complete! >>> errcode [CLI] bundle plan delete pipelines.my_pipelines +delete schemas.my_schema >>> [CLI] bundle deploy --auto-approve Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... +This action will result in the deletion or recreation of the following UC schemas. Any underlying data may be lost: + delete schema my_schema + This action will result in the deletion or recreation of the following Lakeflow Declarative Pipelines along with the Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the pipelines will restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline diff --git a/acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt b/acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt index 01353aa146..3c78d4985c 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt +++ b/acceptance/bundle/lifecycle/prevent-destroy/out.terraform.txt @@ -1,7 +1,50 @@ +>>> musterr [CLI] bundle destroy --auto-approve +Error: exit status 1 + +Error: Instance cannot be destroyed + + on bundle.tf.json line 15, in resource.databricks_pipeline: + 15: "my_pipelines": { + +Resource databricks_pipeline.my_pipelines has lifecycle.prevent_destroy set, +but the plan calls for this resource to be destroyed. 
To avoid this error and +continue with the plan, either disable lifecycle.prevent_destroy or reduce +the scope of the plan using the -target flag. + +Error: Instance cannot be destroyed + + on bundle.tf.json line 38, in resource.databricks_schema: + 38: "my_schema": { + +Resource databricks_schema.my_schema has lifecycle.prevent_destroy set, but +the plan calls for this resource to be destroyed. To avoid this error and +continue with the plan, either disable lifecycle.prevent_destroy or reduce +the scope of the plan using the -target flag. + + + +Exit code (musterr): 1 + >>> errcode [CLI] bundle plan +Error: exit status 1 ->>> musterr [CLI] bundle destroy --auto-approve +Error: Instance cannot be destroyed + + on bundle.tf.json line 15, in resource.databricks_pipeline: + 15: "my_pipelines": { + +Resource databricks_pipeline.my_pipelines has lifecycle.prevent_destroy set, +but the plan calls for this resource to be destroyed. To avoid this error and +continue with the plan, either disable lifecycle.prevent_destroy or reduce +the scope of the plan using the -target flag. + + + +Exit code: 1 + +>>> musterr [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... Error: exit status 1 Error: Instance cannot be destroyed @@ -31,6 +74,16 @@ but the plan calls for this resource to be destroyed. To avoid this error and continue with the plan, either disable lifecycle.prevent_destroy or reduce the scope of the plan using the -target flag. +Error: Instance cannot be destroyed + + on bundle.tf.json line 38, in resource.databricks_schema: + 38: "my_schema": { + +Resource databricks_schema.my_schema has lifecycle.prevent_destroy set, but +the plan calls for this resource to be destroyed. To avoid this error and +continue with the plan, either disable lifecycle.prevent_destroy or reduce +the scope of the plan using the -target flag. + Exit code: 1 @@ -49,16 +102,30 @@ but the plan calls for this resource to be destroyed. To avoid this error and continue with the plan, either disable lifecycle.prevent_destroy or reduce the scope of the plan using the -target flag. +Error: Instance cannot be destroyed + + on bundle.tf.json line 38, in resource.databricks_schema: + 38: "my_schema": { + +Resource databricks_schema.my_schema has lifecycle.prevent_destroy set, but +the plan calls for this resource to be destroyed. To avoid this error and +continue with the plan, either disable lifecycle.prevent_destroy or reduce +the scope of the plan using the -target flag. + Exit code (musterr): 1 >>> errcode [CLI] bundle plan recreate pipelines.my_pipelines +recreate schemas.my_schema >>> [CLI] bundle deploy --auto-approve Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... +This action will result in the deletion or recreation of the following UC schemas. Any underlying data may be lost: + recreate schema my_schema + This action will result in the deletion or recreation of the following Lakeflow Declarative Pipelines along with the Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the pipelines will restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline @@ -70,10 +137,14 @@ Deployment complete! >>> errcode [CLI] bundle plan delete pipelines.my_pipelines +delete schemas.my_schema >>> [CLI] bundle deploy --auto-approve Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... 
+This action will result in the deletion or recreation of the following UC schemas. Any underlying data may be lost: + delete schema my_schema + This action will result in the deletion or recreation of the following Lakeflow Declarative Pipelines along with the Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the pipelines will restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline diff --git a/acceptance/bundle/lifecycle/prevent-destroy/output.txt b/acceptance/bundle/lifecycle/prevent-destroy/output.txt index 13b0ba466b..33bdbbc346 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/output.txt +++ b/acceptance/bundle/lifecycle/prevent-destroy/output.txt @@ -13,3 +13,5 @@ Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/de Deploying resources... Updating deployment state... Deployment complete! + +>>> errcode [CLI] bundle plan diff --git a/acceptance/bundle/lifecycle/prevent-destroy/resources/pipeline.yml b/acceptance/bundle/lifecycle/prevent-destroy/resources/pipeline.yml new file mode 100644 index 0000000000..075e31b8ad --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent-destroy/resources/pipeline.yml @@ -0,0 +1,11 @@ +resources: + pipelines: + my_pipelines: + name: "test-pipeline" + libraries: + - notebook: + path: "../test-notebook.py" + lifecycle: + prevent_destroy: true + schema: test-schema + catalog: main diff --git a/acceptance/bundle/lifecycle/prevent-destroy/resources/schema.yml b/acceptance/bundle/lifecycle/prevent-destroy/resources/schema.yml new file mode 100644 index 0000000000..840f0c1cde --- /dev/null +++ b/acceptance/bundle/lifecycle/prevent-destroy/resources/schema.yml @@ -0,0 +1,7 @@ +resources: + schemas: + my_schema: + catalog_name: "test-catalog" + name: test-schema + lifecycle: + prevent_destroy: true diff --git a/acceptance/bundle/lifecycle/prevent-destroy/script b/acceptance/bundle/lifecycle/prevent-destroy/script index 9b018d0372..92ccac5a68 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/script +++ b/acceptance/bundle/lifecycle/prevent-destroy/script @@ -2,21 +2,29 @@ trace $CLI bundle validate trace $CLI bundle deploy -trace errcode $CLI bundle plan >out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 +trace errcode $CLI bundle plan trace musterr $CLI bundle destroy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 # Changing the catalog name, deploy must fail because pipeline will be recreated -update_file.py databricks.yml 'catalog: main' 'catalog: mainnew' +update_file.py resources/pipeline.yml 'catalog: main' 'catalog: mainnew' +trace errcode $CLI bundle plan >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 +trace musterr $CLI bundle deploy >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 + +# Changing the schema name, deploy must fail because schema will be recreated +update_file.py resources/schema.yml 'name: test-schema' 'name: test-schema-new' trace errcode $CLI bundle plan >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 trace musterr $CLI bundle deploy >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 # Removing the prevent_destroy, deploy must succeed -update_file.py databricks.yml 'prevent_destroy: true' 'prevent_destroy: false' +update_file.py resources/pipeline.yml 'prevent_destroy: true' 'prevent_destroy: false' +update_file.py resources/schema.yml 'prevent_destroy: true' 'prevent_destroy: false' trace errcode $CLI bundle plan >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 trace $CLI bundle deploy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 
+update_file.py resources/pipeline.yml 'prevent_destroy: false' 'prevent_destroy: true' +update_file.py resources/schema.yml 'prevent_destroy: false' 'prevent_destroy: true' + -update_file.py databricks.yml 'prevent_destroy: false' 'prevent_destroy: true' -# Removing the pipeline, deploy must succeed -update_file.py databricks.yml '<<: *pipeline_base' '' +# Removing the pipeline and schema, deploy must succeed +rm resources/pipeline.yml resources/schema.yml trace errcode $CLI bundle plan >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 trace $CLI bundle deploy --auto-approve >>out.$DATABRICKS_CLI_DEPLOYMENT.txt 2>&1 diff --git a/acceptance/bundle/lifecycle/prevent-destroy/test.toml b/acceptance/bundle/lifecycle/prevent-destroy/test.toml index 71228fe351..3b15472573 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/test.toml +++ b/acceptance/bundle/lifecycle/prevent-destroy/test.toml @@ -3,3 +3,30 @@ EnvVaryOutput = "DATABRICKS_CLI_DEPLOYMENT" Ignore = [ ".databricks" ] + +[[Server]] +Pattern = "POST /api/2.0/serving-endpoints" +Response.Body = ''' +{ + "id": "test-endpoint-6260d50f-e8ff-4905-8f28-812345678903", + "name": "test-endpoint-6260d50f-e8ff-4905-8f28-812345678903" +} +''' + +[[Server]] +Pattern = "GET /api/2.0/serving-endpoints/" + + +[[Server]] +Pattern = "GET /api/2.0/serving-endpoints/test-endpoint-6260d50f-e8ff-4905-8f28-812345678903" +Response.Body = ''' +{ + "id": "test-endpoint-6260d50f-e8ff-4905-8f28-812345678903", + "permission_level": "CAN_MANAGE", + "route_optimized": false, + "state": { + "config_update": "NOT_UPDATING", + "ready": "NOT_READY" + } +} +''' From 42faa7b527bba12f37f85f9da14eb410df168edd Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 9 Sep 2025 15:52:59 +0200 Subject: [PATCH 19/19] show all failed resources --- .../bundle/lifecycle/prevent-destroy/out.direct-exp.txt | 2 ++ bundle/phases/plan.go | 7 +++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt b/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt index 474f0d4c50..0c27dcdea3 100644 --- a/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt +++ b/acceptance/bundle/lifecycle/prevent-destroy/out.direct-exp.txt @@ -1,6 +1,7 @@ >>> musterr [CLI] bundle destroy --auto-approve Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for pipelines.my_pipelines +resource my_schema has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for schemas.my_schema Exit code (musterr): 1 @@ -22,6 +23,7 @@ recreate schemas.my_schema >>> musterr [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/prevent-destroy/default/files... Error: resource my_pipelines has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for pipelines.my_pipelines +resource my_schema has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. 
To avoid this error, disable lifecycle.prevent_destroy for schemas.my_schema Exit code (musterr): 1 diff --git a/bundle/phases/plan.go b/bundle/phases/plan.go index 0b0ef06c1e..65e826118a 100644 --- a/bundle/phases/plan.go +++ b/bundle/phases/plan.go @@ -2,6 +2,7 @@ package phases import ( "context" + "errors" "fmt" "github.com/databricks/cli/bundle" @@ -54,6 +55,7 @@ func deployPrepare(ctx context.Context, b *bundle.Bundle) map[string][]libraries // If it does, it returns an error. func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action) error { root := b.Config.Value() + var errs []error for _, action := range actions { if action.ActionType != deployplan.ActionTypeRecreate && action.ActionType != deployplan.ActionTypeDelete { continue @@ -71,8 +73,9 @@ func checkForPreventDestroy(b *bundle.Bundle, actions []deployplan.Action) error return fmt.Errorf("internal error: prevent_destroy is not a boolean for %s.%s", action.Group, action.Key) } if preventDestroy { - return fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for %s.%s", action.Key, action.Group, action.Key) + errs = append(errs, fmt.Errorf("resource %s has lifecycle.prevent_destroy set, but the plan calls for this resource to be recreated or destroyed. To avoid this error, disable lifecycle.prevent_destroy for %s.%s", action.Key, action.Group, action.Key)) } } - return nil + + return errors.Join(errs...) }
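For reference, the error-aggregation pattern introduced in PATCH 19's checkForPreventDestroy can be sketched in isolation as follows. This is a minimal, self-contained sketch and not part of the patch series: the checkAll helper and the hard-coded resource names are illustrative only. The property the final return relies on is that errors.Join(errs...) yields a single error whose message lists each collected error on its own line, and yields nil when no errors were collected, so the happy path needs no separate success return.

    package main

    import (
        "errors"
        "fmt"
    )

    // checkAll mirrors the aggregation in checkForPreventDestroy: collect one
    // error per offending resource and join them at the end, so a single run
    // reports every violation instead of stopping at the first one.
    func checkAll(preventDestroy map[string]bool) error {
        var errs []error
        for _, name := range []string{"my_pipelines", "my_schema"} {
            if preventDestroy[name] {
                errs = append(errs, fmt.Errorf("resource %s has lifecycle.prevent_destroy set", name))
            }
        }
        // errors.Join returns nil when errs is empty or contains only nil values.
        return errors.Join(errs...)
    }

    func main() {
        err := checkAll(map[string]bool{"my_pipelines": true, "my_schema": true})
        // Prints both messages as one joined error, newline-separated.
        fmt.Println(err)
    }

This is the behavior visible in the out.direct-exp.txt expectations above, where the my_pipelines and my_schema violations are reported together under a single Error: prefix once the patch is applied.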