diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index e95cb06df8..666edf2d82 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -9,6 +9,7 @@ ### Dependency updates ### Bundles +* Add support for alerts to DABs ([#4004](https://github.com/databricks/cli/pull/4004)) * Allow `file://` URIs in job libraries to reference runtime filesystem paths (e.g., JARs pre-installed on clusters via init scripts). These paths are no longer treated as local files to upload. ([#3884](https://github.com/databricks/cli/pull/3884)) ### API Changes diff --git a/acceptance/bundle/deployment/bind/alert/alert.json b/acceptance/bundle/deployment/bind/alert/alert.json deleted file mode 100644 index 6e7406230c..0000000000 --- a/acceptance/bundle/deployment/bind/alert/alert.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "display_name": "Test Alert", - "query_text": "SELECT 1", - "warehouse_id": "0123-456789-warehouse0", - "custom_summary": "Test Alert Summary", - "custom_description": "Test Alert Description" -} diff --git a/acceptance/bundle/deployment/bind/alert/alert.json.tmpl b/acceptance/bundle/deployment/bind/alert/alert.json.tmpl new file mode 100644 index 0000000000..e65d68f847 --- /dev/null +++ b/acceptance/bundle/deployment/bind/alert/alert.json.tmpl @@ -0,0 +1,29 @@ +{ + "display_name": "test-alert-$UNIQUE_NAME", + "query_text": "SELECT 1", + "warehouse_id": "$TEST_DEFAULT_WAREHOUSE_ID", + "custom_summary": "Test Alert Summary", + "custom_description": "Test Alert Description", + "evaluation": { + "comparison_operator": "EQUAL", + "notification": { + "notify_on_ok": false, + "retrigger_seconds": 1 + }, + "source": { + "aggregation": "MAX", + "display": "1", + "name": "1" + }, + "threshold": { + "value": { + "double_value": 1 + } + } + }, + "schedule": { + "pause_status": "UNPAUSED", + "quartz_cron_schedule": "0 0 * * * ?", + "timezone_id": "UTC" + } +} diff --git a/acceptance/bundle/deployment/bind/alert/databricks.yml b/acceptance/bundle/deployment/bind/alert/databricks.yml 
deleted file mode 100644 index a23cc7ce18..0000000000 --- a/acceptance/bundle/deployment/bind/alert/databricks.yml +++ /dev/null @@ -1,8 +0,0 @@ -resources: - alerts: - my_alert: - display_name: test-alert - query_text: "SELECT 1" - warehouse_id: "test-sql-warehouse" - custom_summary: "test-alert-summary" - custom_description: "test-alert-description" diff --git a/acceptance/bundle/deployment/bind/alert/databricks.yml.tmpl b/acceptance/bundle/deployment/bind/alert/databricks.yml.tmpl new file mode 100644 index 0000000000..05e5165e97 --- /dev/null +++ b/acceptance/bundle/deployment/bind/alert/databricks.yml.tmpl @@ -0,0 +1,19 @@ +bundle: + name: test-bind-alert-$UNIQUE_NAME + +resources: + alerts: + my_alert: + display_name: test-alert + evaluation: + comparison_operator: EQUAL + source: + name: "1" + threshold: + value: + double_value: 2 + query_text: select 2 + schedule: + quartz_cron_schedule: "44 19 */1 * * ?" + timezone_id: Europe/Amsterdam + warehouse_id: aaaaaaaaaaaaaaaa diff --git a/acceptance/bundle/deployment/bind/alert/out.test.toml b/acceptance/bundle/deployment/bind/alert/out.test.toml index f9eb74f070..e1a07763f6 100644 --- a/acceptance/bundle/deployment/bind/alert/out.test.toml +++ b/acceptance/bundle/deployment/bind/alert/out.test.toml @@ -1,5 +1,8 @@ Local = false -Cloud = false +Cloud = true + +[CloudEnvs] + aws = false [EnvMatrix] DATABRICKS_BUNDLE_ENGINE = ["terraform"] diff --git a/acceptance/bundle/deployment/bind/alert/output.txt b/acceptance/bundle/deployment/bind/alert/output.txt index d37176b247..27949d29d1 100644 --- a/acceptance/bundle/deployment/bind/alert/output.txt +++ b/acceptance/bundle/deployment/bind/alert/output.txt @@ -1,32 +1,32 @@ >>> [CLI] alerts-v2 create-alert --json @alert.json ->>> [CLI] bundle deployment bind my_alert [UUID] --auto-approve +>>> [CLI] bundle deployment bind my_alert [ALERT_ID] --auto-approve Updating deployment state... 
-Successfully bound alert with an id '[UUID]' +Successfully bound alert with an id '[ALERT_ID]' Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle summary -Name: test-bundle-$UNIQUE_NAME +Name: test-bind-alert-[UNIQUE_NAME] Target: default Workspace: User: [USERNAME] - Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle-$UNIQUE_NAME/default + Path: /Workspace/Users/[USERNAME]/.bundle/test-bind-alert-[UNIQUE_NAME]/default Resources: Alerts: my_alert: Name: test-alert - URL: [DATABRICKS_URL]/sql/alerts-v2/[UUID]?o=[NUMID] + URL: [DATABRICKS_URL]/sql/alerts-v2/[ALERT_ID] >>> [CLI] bundle deployment unbind my_alert Updating deployment state... >>> [CLI] bundle summary -Name: test-bundle-$UNIQUE_NAME +Name: test-bind-alert-[UNIQUE_NAME] Target: default Workspace: User: [USERNAME] - Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle-$UNIQUE_NAME/default + Path: /Workspace/Users/[USERNAME]/.bundle/test-bind-alert-[UNIQUE_NAME]/default Resources: Alerts: my_alert: diff --git a/acceptance/bundle/deployment/bind/alert/script b/acceptance/bundle/deployment/bind/alert/script index 024721873b..88e12f15c3 100644 --- a/acceptance/bundle/deployment/bind/alert/script +++ b/acceptance/bundle/deployment/bind/alert/script @@ -1,4 +1,8 @@ +envsubst < databricks.yml.tmpl > databricks.yml +envsubst < alert.json.tmpl > alert.json + alert_id=$(trace $CLI alerts-v2 create-alert --json @alert.json | jq -r '.id') +echo "$alert_id:ALERT_ID" >> ACC_REPLS trace $CLI bundle deployment bind my_alert $alert_id --auto-approve trace $CLI bundle summary diff --git a/acceptance/bundle/deployment/bind/alert/test.toml b/acceptance/bundle/deployment/bind/alert/test.toml index 13cff7b7f4..f1ac73233a 100644 --- a/acceptance/bundle/deployment/bind/alert/test.toml +++ b/acceptance/bundle/deployment/bind/alert/test.toml @@ -1,4 +1,12 @@ -Cloud = false -Local = false # Enable when releasing support for alerts. 
+Cloud = true +Local = false BundleConfigTarget = "databricks.yml" + +# On aws the host URL includes the workspace ID as well. Thus skipping it to keep the test simple. +CloudEnvs.aws = false + +Ignore = [ + "databricks.yml", + "alert.json", +] diff --git a/acceptance/bundle/refschema/out.fields.txt b/acceptance/bundle/refschema/out.fields.txt index f1a7936ac7..9c46b13247 100644 --- a/acceptance/bundle/refschema/out.fields.txt +++ b/acceptance/bundle/refschema/out.fields.txt @@ -55,6 +55,13 @@ resources.alerts.*.schedule.timezone_id string ALL resources.alerts.*.update_time string ALL resources.alerts.*.url string INPUT resources.alerts.*.warehouse_id string ALL +resources.alerts.*.permissions.object_id string ALL +resources.alerts.*.permissions.permissions []iam.AccessControlRequest ALL +resources.alerts.*.permissions.permissions[*] iam.AccessControlRequest ALL +resources.alerts.*.permissions.permissions[*].group_name string ALL +resources.alerts.*.permissions.permissions[*].permission_level iam.PermissionLevel ALL +resources.alerts.*.permissions.permissions[*].service_principal_name string ALL +resources.alerts.*.permissions.permissions[*].user_name string ALL resources.apps.*.active_deployment *apps.AppDeployment ALL resources.apps.*.active_deployment.create_time string ALL resources.apps.*.active_deployment.creator string ALL diff --git a/acceptance/bundle/resources/alerts/basic/databricks.yml.tmpl b/acceptance/bundle/resources/alerts/basic/databricks.yml.tmpl index 022e43a81e..36f1183b85 100644 --- a/acceptance/bundle/resources/alerts/basic/databricks.yml.tmpl +++ b/acceptance/bundle/resources/alerts/basic/databricks.yml.tmpl @@ -5,8 +5,8 @@ resources: alerts: myalert: permissions: - - level: CAN_RUN - group_name: users + - level: CAN_MANAGE + user_name: deco-test-user@databricks.com custom_summary: "My alert" display_name: "My alert" @@ -27,4 +27,4 @@ resources: pause_status: "UNPAUSED" quartz_cron_schedule: "44 19 */1 * * ?" 
timezone_id: "Europe/Amsterdam" - warehouse_id: "dd43ee29fedd958d" + warehouse_id: $TEST_DEFAULT_WAREHOUSE_ID diff --git a/acceptance/bundle/resources/alerts/basic/out.test.toml b/acceptance/bundle/resources/alerts/basic/out.test.toml index d560f1de04..01ed6822af 100644 --- a/acceptance/bundle/resources/alerts/basic/out.test.toml +++ b/acceptance/bundle/resources/alerts/basic/out.test.toml @@ -1,5 +1,5 @@ Local = true -Cloud = false +Cloud = true [EnvMatrix] DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct"] diff --git a/acceptance/bundle/resources/alerts/basic/output.txt b/acceptance/bundle/resources/alerts/basic/output.txt index e978b67eff..7ca1ad4eb9 100644 --- a/acceptance/bundle/resources/alerts/basic/output.txt +++ b/acceptance/bundle/resources/alerts/basic/output.txt @@ -1,22 +1,80 @@ >>> [CLI] bundle deploy -Warning: unknown field: alerts - at resources - in databricks.yml:5:3 - Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/alerts-basic-[UNIQUE_NAME]/default/files... Deploying resources... +Updating deployment state... Deployment complete! 
-Warning: unknown field: alerts - at resources - in databricks.yml:5:3 +>>> [CLI] alerts-v2 get-alert [ALERT_ID] +{ + "display_name": "My alert", + "lifecycle_state": "ACTIVE", + "custom_summary": "My alert", + "evaluation": { + "comparison_operator": "EQUAL", + "notification": { + "notify_on_ok": false, + "retrigger_seconds": 1 + }, + "source": { + "aggregation": "MAX", + "display": "1", + "name": "1" + }, + "threshold": { + "value": { + "double_value": 2 + } + } + }, + "query_text": "select 2", + "schedule": { + "pause_status": "UNPAUSED", + "quartz_cron_schedule": "44 19 */1 * * ?", + "timezone_id": "Europe/Amsterdam" + }, + "warehouse_id": "[TEST_DEFAULT_WAREHOUSE_ID]" +} + +=== assert that permissions are applied +>>> [CLI] permissions get alertsv2 [ALERT_ID] +{ + "user_name": "deco-test-user@databricks.com", + "all_permissions": [ + { + "inherited": false, + "permission_level": "CAN_MANAGE" + } + ] +} + +=== assert that no permanent drift happens +>>> [CLI] bundle plan +Plan: 0 to add, 0 to change, 0 to delete, 2 unchanged + +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete resources.alerts.myalert + +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/alerts-basic-[UNIQUE_NAME]/default ->>> [CLI] alerts-v2 get-alert null -Warning: unknown field: alerts - at resources - in databricks.yml:5:3 +Deleting files... +Destroy complete! 
-Error: Resource sql.AlertV2 not found: null +>>> [CLI] alerts-v2 get-alert [ALERT_ID] +{ + "display_name": "My alert", + "lifecycle_state": "DELETED" +} -Exit code: 1 +>>> [CLI] bundle summary +Name: alerts-basic-[UNIQUE_NAME] +Target: default +Workspace: + User: [USERNAME] + Path: /Workspace/Users/[USERNAME]/.bundle/alerts-basic-[UNIQUE_NAME]/default +Resources: + Alerts: + myalert: + Name: My alert + URL: (not deployed) diff --git a/acceptance/bundle/resources/alerts/basic/script b/acceptance/bundle/resources/alerts/basic/script index 19e05a31a4..16f6feeb8b 100644 --- a/acceptance/bundle/resources/alerts/basic/script +++ b/acceptance/bundle/resources/alerts/basic/script @@ -4,9 +4,15 @@ trace $CLI bundle deploy alert_id=$($CLI bundle summary --output json | jq -r '.resources.alerts.myalert.id') -trace $CLI alerts-v2 get-alert $alert_id | jq '{display_name, lifecycle_state}' +echo "$alert_id:ALERT_ID" >> ACC_REPLS + +trace $CLI alerts-v2 get-alert $alert_id | jq '{display_name, lifecycle_state, custom_summary, evaluation, query_text, schedule, warehouse_id}' + +title "assert that permissions are applied" +trace $CLI permissions get alertsv2 $alert_id | jq ".access_control_list.[]" -c | grep 'deco-test-user@databricks.com' | jq '{user_name, all_permissions}' -trace $CLI permissions get alertsv2 $alert_id | jq '{access_control_list: [.access_control_list[] | select(any(.all_permissions[]; .permission_level == "CAN_RUN"))], object_id, object_type}' +title "assert that no permanent drift happens" +trace $CLI bundle plan trace $CLI bundle destroy --auto-approve diff --git a/acceptance/bundle/resources/alerts/basic/test.toml b/acceptance/bundle/resources/alerts/basic/test.toml index 5de630bdf8..8f2cd86d82 100644 --- a/acceptance/bundle/resources/alerts/basic/test.toml +++ b/acceptance/bundle/resources/alerts/basic/test.toml @@ -1,4 +1,4 @@ Local = true -Cloud = false +Cloud = true RecordRequests = false Ignore = [".databricks"] diff --git 
a/acceptance/bundle/summary/modified_status/databricks.yml b/acceptance/bundle/summary/modified_status/databricks.yml index 2bc709b73e..7e940c721a 100644 --- a/acceptance/bundle/summary/modified_status/databricks.yml +++ b/acceptance/bundle/summary/modified_status/databricks.yml @@ -21,7 +21,15 @@ resources: alerts: my_alert: display_name: test-alert - query_text: "SELECT 1" - warehouse_id: "test-sql-warehouse" - custom_summary: "test-alert-summary" - custom_description: "test-alert-description" + evaluation: + comparison_operator: EQUAL + source: + name: "1" + threshold: + value: + double_value: 2 + query_text: select 2 + schedule: + quartz_cron_schedule: "44 19 */1 * * ?" + timezone_id: Europe/Amsterdam + warehouse_id: aaaaaaaaaaaaaaaa diff --git a/acceptance/bundle/summary/modified_status/output.txt b/acceptance/bundle/summary/modified_status/output.txt index 974a3fe13c..a3c123eb03 100644 --- a/acceptance/bundle/summary/modified_status/output.txt +++ b/acceptance/bundle/summary/modified_status/output.txt @@ -1,11 +1,31 @@ === Initial view of resources without id and modified_status=created >>> [CLI] bundle summary -o json -Warning: unknown field: alerts - at resources - in databricks.yml:4:3 - { + "alerts": { + "my_alert": { + "display_name": "test-alert", + "evaluation": { + "comparison_operator": "EQUAL", + "source": { + "name": "1" + }, + "threshold": { + "value": { + "double_value": 2 + } + } + }, + "modified_status": "created", + "parent_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/resources", + "query_text": "select 2", + "schedule": { + "quartz_cron_schedule": "44 19 */1 * * ?", + "timezone_id": "Europe/Amsterdam" + }, + "warehouse_id": "aaaaaaaaaaaaaaaa" + } + }, "pipelines": { "my_pipeline": { "channel": "CURRENT", @@ -48,10 +68,6 @@ Warning: unknown field: alerts } >>> [CLI] bundle deploy -Warning: unknown field: alerts - at resources - in databricks.yml:4:3 - Uploading bundle files to 
/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... Deploying resources... Updating deployment state... @@ -59,11 +75,32 @@ Deployment complete! === Post-deployment view of resources with id and without modified_status >>> [CLI] bundle summary -o json -Warning: unknown field: alerts - at resources - in databricks.yml:4:3 - { + "alerts": { + "my_alert": { + "display_name": "test-alert", + "evaluation": { + "comparison_operator": "EQUAL", + "source": { + "name": "1" + }, + "threshold": { + "value": { + "double_value": 2 + } + } + }, + "id": "[UUID]", + "parent_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/resources", + "query_text": "select 2", + "schedule": { + "quartz_cron_schedule": "44 19 */1 * * ?", + "timezone_id": "Europe/Amsterdam" + }, + "url": "[DATABRICKS_URL]/sql/alerts-v2/[UUID]?o=[NUMID]", + "warehouse_id": "aaaaaaaaaaaaaaaa" + } + }, "pipelines": { "my_pipeline": { "channel": "CURRENT", @@ -111,6 +148,13 @@ Warning: unknown field: alerts === Expecting all resources to have modified_status=deleted >>> [CLI] bundle summary -o json { + "alerts": { + "my_alert": { + "id": "[UUID]", + "modified_status": "deleted", + "url": "[DATABRICKS_URL]/sql/alerts-v2/[UUID]?o=[NUMID]" + } + }, "pipelines": { "my_pipeline": { "id": "[UUID]", @@ -136,6 +180,7 @@ Warning: unknown field: alerts >>> [CLI] bundle destroy --auto-approve The following resources will be deleted: + delete resources.alerts.my_alert delete resources.pipelines.my_pipeline delete resources.schemas.my_schema delete resources.sql_warehouses.my_sql_warehouse diff --git a/acceptance/bundle/validate/empty_resources/empty_def/output.txt b/acceptance/bundle/validate/empty_resources/empty_def/output.txt index 85b27f9899..5e4ac03100 100644 --- a/acceptance/bundle/validate/empty_resources/empty_def/output.txt +++ b/acceptance/bundle/validate/empty_resources/empty_def/output.txt @@ -143,8 +143,12 @@ Error: secret_scope rname is not defined } === resources.alerts.rname === 
-Warning: unknown field: alerts - at resources - in databricks.yml:5:3 +Error: alert rname is not defined + at resources.alerts.rname + in databricks.yml:6:11 -{} +{ + "alerts": { + "rname": null + } +} diff --git a/acceptance/bundle/validate/empty_resources/empty_dict/output.txt b/acceptance/bundle/validate/empty_resources/empty_dict/output.txt index f2692be0f6..bc0635fe8c 100644 --- a/acceptance/bundle/validate/empty_resources/empty_dict/output.txt +++ b/acceptance/bundle/validate/empty_resources/empty_dict/output.txt @@ -187,8 +187,30 @@ Warning: required field "name" is not set } === resources.alerts.rname === -Warning: unknown field: alerts - at resources - in databricks.yml:5:3 +Warning: required field "display_name" is not set + at resources.alerts.rname + in databricks.yml:6:12 + +Warning: required field "evaluation" is not set + at resources.alerts.rname + in databricks.yml:6:12 + +Warning: required field "query_text" is not set + at resources.alerts.rname + in databricks.yml:6:12 + +Warning: required field "schedule" is not set + at resources.alerts.rname + in databricks.yml:6:12 -{} +Warning: required field "warehouse_id" is not set + at resources.alerts.rname + in databricks.yml:6:12 + +{ + "alerts": { + "rname": { + "parent_path": "/Workspace/Users/[USERNAME]/.bundle/BUNDLE/default/resources" + } + } +} diff --git a/acceptance/bundle/validate/empty_resources/null/output.txt b/acceptance/bundle/validate/empty_resources/null/output.txt index cedc5af0d3..1c64bbb717 100644 --- a/acceptance/bundle/validate/empty_resources/null/output.txt +++ b/acceptance/bundle/validate/empty_resources/null/output.txt @@ -143,8 +143,12 @@ Error: secret_scope rname is not defined } === resources.alerts.rname === -Warning: unknown field: alerts - at resources - in databricks.yml:5:3 +Error: alert rname is not defined + at resources.alerts.rname + in databricks.yml:6:12 -{} +{ + "alerts": { + "rname": null + } +} diff --git 
a/acceptance/bundle/validate/empty_resources/with_grants/output.txt b/acceptance/bundle/validate/empty_resources/with_grants/output.txt index 4bd690c2e2..221a4c10a7 100644 --- a/acceptance/bundle/validate/empty_resources/with_grants/output.txt +++ b/acceptance/bundle/validate/empty_resources/with_grants/output.txt @@ -232,8 +232,34 @@ Warning: required field "name" is not set } === resources.alerts.rname === -Warning: unknown field: alerts - at resources - in databricks.yml:5:3 +Warning: unknown field: grants + at resources.alerts.rname + in databricks.yml:7:7 + +Warning: required field "display_name" is not set + at resources.alerts.rname + in databricks.yml:7:7 + +Warning: required field "evaluation" is not set + at resources.alerts.rname + in databricks.yml:7:7 + +Warning: required field "query_text" is not set + at resources.alerts.rname + in databricks.yml:7:7 + +Warning: required field "schedule" is not set + at resources.alerts.rname + in databricks.yml:7:7 + +Warning: required field "warehouse_id" is not set + at resources.alerts.rname + in databricks.yml:7:7 -{} +{ + "alerts": { + "rname": { + "parent_path": "/Workspace/Users/[USERNAME]/.bundle/BUNDLE/default/resources" + } + } +} diff --git a/acceptance/bundle/validate/empty_resources/with_permissions/output.txt b/acceptance/bundle/validate/empty_resources/with_permissions/output.txt index 48ddbb156d..87adda9e5e 100644 --- a/acceptance/bundle/validate/empty_resources/with_permissions/output.txt +++ b/acceptance/bundle/validate/empty_resources/with_permissions/output.txt @@ -203,8 +203,30 @@ Warning: required field "name" is not set } === resources.alerts.rname === -Warning: unknown field: alerts - at resources - in databricks.yml:5:3 +Warning: required field "display_name" is not set + at resources.alerts.rname + in databricks.yml:7:7 + +Warning: required field "evaluation" is not set + at resources.alerts.rname + in databricks.yml:7:7 + +Warning: required field "query_text" is not set + at 
resources.alerts.rname + in databricks.yml:7:7 + +Warning: required field "schedule" is not set + at resources.alerts.rname + in databricks.yml:7:7 -{} +Warning: required field "warehouse_id" is not set + at resources.alerts.rname + in databricks.yml:7:7 + +{ + "alerts": { + "rname": { + "parent_path": "/Workspace/Users/[USERNAME]/.bundle/BUNDLE/default/resources" + } + } +} diff --git a/bundle/config/mutator/resourcemutator/apply_presets.go b/bundle/config/mutator/resourcemutator/apply_presets.go index 6d1eb16d72..1311f3660f 100644 --- a/bundle/config/mutator/resourcemutator/apply_presets.go +++ b/bundle/config/mutator/resourcemutator/apply_presets.go @@ -240,12 +240,12 @@ func (m *applyPresets) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos // Apps: No presets // Alerts: Prefix - // for _, a := range r.Alerts { - // if a == nil { - // continue - // } - // a.DisplayName = prefix + a.DisplayName - // } + for _, a := range r.Alerts { + if a == nil { + continue + } + a.DisplayName = prefix + a.DisplayName + } // SQL Warehouses: Prefix, Tags for _, w := range r.SqlWarehouses { diff --git a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go index afce55f98e..de3aa992af 100644 --- a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go +++ b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go @@ -190,13 +190,13 @@ func mockBundle(mode config.Mode) *bundle.Bundle { }, }, }, - // Alerts: map[string]*resources.Alert{ - // "alert1": { - // AlertV2: sql.AlertV2{ - // DisplayName: "alert1", - // }, - // }, - // }, + Alerts: map[string]*resources.Alert{ + "alert1": { + AlertV2: sql.AlertV2{ + DisplayName: "alert1", + }, + }, + }, }, }, SyncRoot: vfs.MustNew("/Users/lennart.kats@databricks.com"), diff --git a/bundle/config/mutator/resourcemutator/run_as.go b/bundle/config/mutator/resourcemutator/run_as.go index 499f9ae293..8433db9ed8 100644 --- 
a/bundle/config/mutator/resourcemutator/run_as.go +++ b/bundle/config/mutator/resourcemutator/run_as.go @@ -110,15 +110,15 @@ func validateRunAs(b *bundle.Bundle) diag.Diagnostics { )) } - // // Alerts do not support run_as in the API. - // if len(b.Config.Resources.Alerts) > 0 { - // diags = diags.Extend(reportRunAsNotSupported( - // "alerts", - // b.Config.GetLocation("resources.alerts"), - // b.Config.Workspace.CurrentUser.UserName, - // identity, - // )) - // } + // Alerts do not support run_as in the API. + if len(b.Config.Resources.Alerts) > 0 { + diags = diags.Extend(reportRunAsNotSupported( + "alerts", + b.Config.GetLocation("resources.alerts"), + b.Config.Workspace.CurrentUser.UserName, + identity, + )) + } // Apps do not support run_as in the API. if len(b.Config.Resources.Apps) > 0 { diff --git a/bundle/config/mutator/resourcemutator/run_as_test.go b/bundle/config/mutator/resourcemutator/run_as_test.go index 93fdb6e457..7500374adc 100644 --- a/bundle/config/mutator/resourcemutator/run_as_test.go +++ b/bundle/config/mutator/resourcemutator/run_as_test.go @@ -32,7 +32,7 @@ func allResourceTypes(t *testing.T) []string { // the dyn library gives us the correct list of all resources supported. 
Please // also update this check when adding a new resource require.Equal(t, []string{ - // "alerts", + "alerts", "apps", "clusters", "dashboards", diff --git a/bundle/config/resources.go b/bundle/config/resources.go index 228bf16704..4cb5da53ce 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -25,11 +25,11 @@ type Resources struct { Dashboards map[string]*resources.Dashboard `json:"dashboards,omitempty"` Apps map[string]*resources.App `json:"apps,omitempty"` SecretScopes map[string]*resources.SecretScope `json:"secret_scopes,omitempty"` - // Alerts map[string]*resources.Alert `json:"alerts,omitempty"` - SqlWarehouses map[string]*resources.SqlWarehouse `json:"sql_warehouses,omitempty"` - DatabaseInstances map[string]*resources.DatabaseInstance `json:"database_instances,omitempty"` - DatabaseCatalogs map[string]*resources.DatabaseCatalog `json:"database_catalogs,omitempty"` - SyncedDatabaseTables map[string]*resources.SyncedDatabaseTable `json:"synced_database_tables,omitempty"` + Alerts map[string]*resources.Alert `json:"alerts,omitempty"` + SqlWarehouses map[string]*resources.SqlWarehouse `json:"sql_warehouses,omitempty"` + DatabaseInstances map[string]*resources.DatabaseInstance `json:"database_instances,omitempty"` + DatabaseCatalogs map[string]*resources.DatabaseCatalog `json:"database_catalogs,omitempty"` + SyncedDatabaseTables map[string]*resources.SyncedDatabaseTable `json:"synced_database_tables,omitempty"` } type ConfigResource interface { @@ -92,7 +92,7 @@ func (r *Resources) AllResources() []ResourceGroup { collectResourceMap(descriptions["dashboards"], r.Dashboards), collectResourceMap(descriptions["volumes"], r.Volumes), collectResourceMap(descriptions["apps"], r.Apps), - // collectResourceMap(descriptions["alerts"], r.Alerts), + collectResourceMap(descriptions["alerts"], r.Alerts), collectResourceMap(descriptions["secret_scopes"], r.SecretScopes), collectResourceMap(descriptions["sql_warehouses"], r.SqlWarehouses), 
collectResourceMap(descriptions["database_instances"], r.DatabaseInstances), @@ -175,11 +175,11 @@ func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error) } } - // for k := range r.Alerts { - // if k == key { - // found = append(found, r.Alerts[k]) - // } - // } + for k := range r.Alerts { + if k == key { + found = append(found, r.Alerts[k]) + } + } for k := range r.SqlWarehouses { if k == key { @@ -236,10 +236,10 @@ func SupportedResources() map[string]resources.ResourceDescription { "volumes": (&resources.Volume{}).ResourceDescription(), "apps": (&resources.App{}).ResourceDescription(), "secret_scopes": (&resources.SecretScope{}).ResourceDescription(), - // "alerts": (&resources.Alert{}).ResourceDescription(), - "sql_warehouses": (&resources.SqlWarehouse{}).ResourceDescription(), - "database_instances": (&resources.DatabaseInstance{}).ResourceDescription(), - "database_catalogs": (&resources.DatabaseCatalog{}).ResourceDescription(), - "synced_database_tables": (&resources.SyncedDatabaseTable{}).ResourceDescription(), + "alerts": (&resources.Alert{}).ResourceDescription(), + "sql_warehouses": (&resources.SqlWarehouse{}).ResourceDescription(), + "database_instances": (&resources.DatabaseInstance{}).ResourceDescription(), + "database_catalogs": (&resources.DatabaseCatalog{}).ResourceDescription(), + "synced_database_tables": (&resources.SyncedDatabaseTable{}).ResourceDescription(), } } diff --git a/bundle/config/resources/permission.go b/bundle/config/resources/permission.go index 1515c3ba51..d7c3f6fca9 100644 --- a/bundle/config/resources/permission.go +++ b/bundle/config/resources/permission.go @@ -148,7 +148,6 @@ type SqlWarehousePermission struct { } // GetAPIRequestObjectType is used by direct to construct a request to permissions API: -// Untested, since we don't have alerts // https://github.com/databricks/terraform-provider-databricks/blob/430902d/permissions/permission_definitions.go#L775C24-L775C32 func (p AlertPermission) 
GetAPIRequestObjectType() string { return "/alertsv2/" } func (p AppPermission) GetAPIRequestObjectType() string { return "/apps/" } diff --git a/bundle/config/resources_test.go b/bundle/config/resources_test.go index 4d0b99f5e9..6bc61196e2 100644 --- a/bundle/config/resources_test.go +++ b/bundle/config/resources_test.go @@ -160,11 +160,11 @@ func TestResourcesBindSupport(t *testing.T) { App: apps.App{}, }, }, - // Alerts: map[string]*resources.Alert{ - // "my_alert": { - // AlertV2: sql.AlertV2{}, - // }, - // }, + Alerts: map[string]*resources.Alert{ + "my_alert": { + AlertV2: sql.AlertV2{}, + }, + }, QualityMonitors: map[string]*resources.QualityMonitor{ "my_quality_monitor": { CreateMonitor: catalog.CreateMonitor{}, @@ -214,7 +214,7 @@ func TestResourcesBindSupport(t *testing.T) { m.GetMockLakeviewAPI().EXPECT().Get(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockVolumesAPI().EXPECT().Read(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockAppsAPI().EXPECT().GetByName(mock.Anything, mock.Anything).Return(nil, nil) - // m.GetMockAlertsV2API().EXPECT().GetAlertById(mock.Anything, mock.Anything).Return(nil, nil) + m.GetMockAlertsV2API().EXPECT().GetAlertById(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockQualityMonitorsAPI().EXPECT().Get(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockServingEndpointsAPI().EXPECT().Get(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockSecretsAPI().EXPECT().ListScopesAll(mock.Anything).Return([]workspace.SecretScope{ diff --git a/bundle/deploy/resource_path_mkdir.go b/bundle/deploy/resource_path_mkdir.go index a59028d5c2..9a1610becb 100644 --- a/bundle/deploy/resource_path_mkdir.go +++ b/bundle/deploy/resource_path_mkdir.go @@ -2,9 +2,11 @@ package deploy import ( "context" + "errors" "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" + "github.com/databricks/databricks-sdk-go/apierr" ) func ResourcePathMkdir() bundle.Mutator { @@ -18,25 +20,24 @@ func (m 
*resourcePathMkdir) Name() string { } func (m *resourcePathMkdir) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - // // Only dashboards and alerts need ${workspace.resource_path} to exist. - // // - // // We'll defer to TF to create resource_path for for dashboards. - // // ref: https://github.com/databricks/terraform-provider-databricks/blob/d84bc6f28d8aa0fd77d110a41f745970299142b1/dashboards/resource_dashboard.go#L83. - // // - // // For alerts we proactively create it here because the TF implementation - // // is autogenerated and cannot be easily customized. - // if len(b.Config.Resources.Alerts) == 0 { - // return nil - // } - - // w := b.WorkspaceClient() - - // // Optimisitcally create the resource path. If it already exists ignore the error. - // err := w.Workspace.MkdirsByPath(ctx, b.Config.Workspace.ResourcePath) - // var aerr *apierr.APIError - // if errors.As(err, &aerr) && aerr.ErrorCode == "RESOURCE_ALREADY_EXISTS" { - // return nil - // } - // return diag.FromErr(err) - return nil + // Only dashboards and alerts need ${workspace.resource_path} to exist. + // + // We'll defer to TF to create resource_path for dashboards. + // ref: https://github.com/databricks/terraform-provider-databricks/blob/d84bc6f28d8aa0fd77d110a41f745970299142b1/dashboards/resource_dashboard.go#L83. + // + // For alerts we proactively create it here because the TF implementation + // is autogenerated and cannot be easily customized. + if len(b.Config.Resources.Alerts) == 0 { + return nil + } + + w := b.WorkspaceClient() + + // Optimistically create the resource path. If it already exists ignore the error. 
+ err := w.Workspace.MkdirsByPath(ctx, b.Config.Workspace.ResourcePath) + var aerr *apierr.APIError + if errors.As(err, &aerr) && aerr.ErrorCode == "RESOURCE_ALREADY_EXISTS" { + return nil + } + return diag.FromErr(err) } diff --git a/bundle/direct/dresources/alert.go b/bundle/direct/dresources/alert.go index 55104adaf4..5613649b9b 100644 --- a/bundle/direct/dresources/alert.go +++ b/bundle/direct/dresources/alert.go @@ -27,7 +27,16 @@ func (*ResourceAlert) PrepareState(input *resources.Alert) *sql.AlertV2 { // DoRead reads the alert by id. func (r *ResourceAlert) DoRead(ctx context.Context, id string) (*sql.AlertV2, error) { - return r.client.AlertsV2.GetAlertById(ctx, id) + alert, err := r.client.AlertsV2.GetAlertById(ctx, id) + if err != nil { + return nil, err + } + + // If the alert is already marked as trashed, return a 404 on DoRead. + if alert.LifecycleState == sql.AlertLifecycleStateDeleted { + return nil, databricks.ErrResourceDoesNotExist + } + return alert, nil } // DoCreate creates the alert and returns its id. 
diff --git a/bundle/direct/dresources/all.go b/bundle/direct/dresources/all.go index 3d7a8ad88d..ed1bd6270f 100644 --- a/bundle/direct/dresources/all.go +++ b/bundle/direct/dresources/all.go @@ -28,6 +28,7 @@ var SupportedResources = map[string]any{ "jobs.permissions": (*ResourcePermissions)(nil), "pipelines.permissions": (*ResourcePermissions)(nil), "apps.permissions": (*ResourcePermissions)(nil), + "alerts.permissions": (*ResourcePermissions)(nil), "clusters.permissions": (*ResourcePermissions)(nil), "database_instances.permissions": (*ResourcePermissions)(nil), "experiments.permissions": (*ResourcePermissions)(nil), diff --git a/bundle/direct/dresources/all_test.go b/bundle/direct/dresources/all_test.go index fb76f3ca61..9a75e199dc 100644 --- a/bundle/direct/dresources/all_test.go +++ b/bundle/direct/dresources/all_test.go @@ -26,6 +26,7 @@ import ( "github.com/databricks/databricks-sdk-go/service/ml" "github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/databricks/databricks-sdk-go/service/serving" + "github.com/databricks/databricks-sdk-go/service/sql" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -143,6 +144,29 @@ var testConfig map[string]any = map[string]any{ }, }, }, + + "alerts": &resources.Alert{ + AlertV2: sql.AlertV2{ + DisplayName: "my-alert", + QueryText: "SELECT 1", + WarehouseId: "test-warehouse-id", + Schedule: sql.CronSchedule{ + QuartzCronSchedule: "0 0 12 * * ?", + TimezoneId: "UTC", + }, + Evaluation: sql.AlertV2Evaluation{ + ComparisonOperator: sql.ComparisonOperatorGreaterThan, + Source: sql.AlertV2OperandColumn{ + Name: "column1", + }, + Threshold: &sql.AlertV2Operand{ + Column: &sql.AlertV2OperandColumn{ + Name: "column2", + }, + }, + }, + }, + }, } type prepareWorkspace func(client *databricks.WorkspaceClient) (any, error) @@ -356,6 +380,37 @@ var testDeps = map[string]prepareWorkspace{ }, nil }, + "alerts.permissions": func(client *databricks.WorkspaceClient) (any, error) { + resp, err 
:= client.AlertsV2.CreateAlert(context.Background(), sql.CreateAlertV2Request{ + Alert: sql.AlertV2{ + DisplayName: "alert-permissions", + QueryText: "SELECT 1", + WarehouseId: "test-warehouse-id", + Schedule: sql.CronSchedule{ + QuartzCronSchedule: "0 0 12 * * ?", + TimezoneId: "UTC", + }, + Evaluation: sql.AlertV2Evaluation{ + ComparisonOperator: sql.ComparisonOperatorGreaterThan, + Source: sql.AlertV2OperandColumn{ + Name: "column1", + }, + }, + }, + }) + if err != nil { + return nil, err + } + + return &PermissionsState{ + ObjectID: "/alertsv2/" + resp.Id, + Permissions: []iam.AccessControlRequest{{ + PermissionLevel: "CAN_MANAGE", + UserName: "user@example.com", + }}, + }, nil + }, + "schemas.grants": func(client *databricks.WorkspaceClient) (any, error) { return &GrantsState{ SecurableType: "schema", diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 85ab74e11d..29b2fd03e6 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -154,6 +154,9 @@ github.com/databricks/cli/bundle/config.Python: If enabled, Python code will execute within this environment. If disabled, it defaults to using the Python interpreter available in the current shell. github.com/databricks/cli/bundle/config.Resources: + "alerts": + "description": |- + PLACEHOLDER "apps": "description": |- The app resource defines a Databricks app. 
@@ -442,6 +445,62 @@ github.com/databricks/cli/bundle/config.Workspace: "state_path": "description": |- The workspace state path +github.com/databricks/cli/bundle/config/resources.Alert: + "create_time": + "description": |- + PLACEHOLDER + "custom_description": + "description": |- + PLACEHOLDER + "custom_summary": + "description": |- + PLACEHOLDER + "display_name": + "description": |- + PLACEHOLDER + "effective_run_as": + "description": |- + PLACEHOLDER + "id": + "description": |- + PLACEHOLDER + "lifecycle_state": + "description": |- + PLACEHOLDER + "owner_user_name": + "description": |- + PLACEHOLDER + "parent_path": + "description": |- + PLACEHOLDER + "query_text": + "description": |- + PLACEHOLDER + "run_as": + "description": |- + PLACEHOLDER + "run_as_user_name": + "description": |- + PLACEHOLDER + "update_time": + "description": |- + PLACEHOLDER + "warehouse_id": + "description": |- + PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.AlertPermission: + "group_name": + "description": |- + PLACEHOLDER + "level": + "description": |- + PLACEHOLDER + "service_principal_name": + "description": |- + PLACEHOLDER + "user_name": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.AppPermission: "group_name": "description": |- diff --git a/bundle/internal/schema/annotations_openapi.yml b/bundle/internal/schema/annotations_openapi.yml index f13c29091c..14a8f3c124 100644 --- a/bundle/internal/schema/annotations_openapi.yml +++ b/bundle/internal/schema/annotations_openapi.yml @@ -1,4 +1,71 @@ # This file is auto-generated. DO NOT EDIT. +github.com/databricks/cli/bundle/config/resources.Alert: + "create_time": + "description": |- + The timestamp indicating when the alert was created. + "x-databricks-field-behaviors_output_only": |- + true + "custom_description": + "description": |- + Custom description for the alert. support mustache template. + "custom_summary": + "description": |- + Custom summary for the alert. 
support mustache template. + "display_name": + "description": |- + The display name of the alert. + "effective_run_as": + "description": |- + The actual identity that will be used to execute the alert. + This is an output-only field that shows the resolved run-as identity after applying + permissions and defaults. + "x-databricks-field-behaviors_output_only": |- + true + "evaluation": {} + "id": + "description": |- + UUID identifying the alert. + "x-databricks-field-behaviors_output_only": |- + true + "lifecycle_state": + "description": |- + Indicates whether the query is trashed. + "x-databricks-field-behaviors_output_only": |- + true + "owner_user_name": + "description": |- + The owner's username. This field is set to "Unavailable" if the user has been deleted. + "x-databricks-field-behaviors_output_only": |- + true + "parent_path": + "description": |- + The workspace path of the folder containing the alert. Can only be set on create, and cannot be updated. + "query_text": + "description": |- + Text of the query to be run. + "run_as": + "description": |- + Specifies the identity that will be used to run the alert. + This field allows you to configure alerts to run as a specific user or service principal. + - For user identity: Set `user_name` to the email of an active workspace user. Users can only set this to their own email. + - For service principal: Set `service_principal_name` to the application ID. Requires the `servicePrincipal/user` role. + If not specified, the alert will run as the request user. + "run_as_user_name": + "description": |- + The run as username or application ID of service principal. + On Create and Update, this field can be set to application ID of an active service principal. Setting this field requires the servicePrincipal/user role. + Deprecated: Use `run_as` field instead. This field will be removed in a future release. 
+ "deprecation_message": |- + This field is deprecated + "schedule": {} + "update_time": + "description": |- + The timestamp indicating when the alert was updated. + "x-databricks-field-behaviors_output_only": |- + true + "warehouse_id": + "description": |- + ID of the SQL warehouse attached to the alert. github.com/databricks/cli/bundle/config/resources.App: "active_deployment": "description": |- @@ -1191,6 +1258,8 @@ github.com/databricks/databricks-sdk-go/service/apps.ComputeSize: MEDIUM - |- LARGE + - |- + LIQUID github.com/databricks/databricks-sdk-go/service/apps.ComputeState: "_": "enum": @@ -3811,6 +3880,10 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionSourceType: MYSQL - |- POSTGRESQL + - |- + REDSHIFT + - |- + SQLDW - |- SQLSERVER - |- @@ -3835,6 +3908,24 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionSourceType: SHAREPOINT - |- DYNAMICS365 + - |- + CONFLUENCE + - |- + META_MARKETING + - |- + GOOGLE_ADS + - |- + TIKTOK_ADS + - |- + SALESFORCE_MARKETING_CLOUD + - |- + HUBSPOT + - |- + WORKDAY_HCM + - |- + GUIDEWIRE + - |- + ZENDESK - |- FOREIGN_CATALOG github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger: {} @@ -4819,6 +4910,106 @@ github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig: "routes": "description": |- The list of routes that define traffic to each served entity. 
+github.com/databricks/databricks-sdk-go/service/sql.Aggregation: + "_": + "enum": + - |- + SUM + - |- + COUNT + - |- + COUNT_DISTINCT + - |- + AVG + - |- + MEDIAN + - |- + MIN + - |- + MAX + - |- + STDDEV +github.com/databricks/databricks-sdk-go/service/sql.AlertEvaluationState: + "_": + "description": |- + UNSPECIFIED - default unspecify value for proto enum, do not use it in the code + UNKNOWN - alert not yet evaluated + TRIGGERED - alert is triggered + OK - alert is not triggered + ERROR - alert evaluation failed + "enum": + - |- + UNKNOWN + - |- + TRIGGERED + - |- + OK + - |- + ERROR +github.com/databricks/databricks-sdk-go/service/sql.AlertLifecycleState: + "_": + "enum": + - |- + ACTIVE + - |- + DELETED +github.com/databricks/databricks-sdk-go/service/sql.AlertV2Evaluation: + "comparison_operator": + "description": |- + Operator used for comparison in alert evaluation. + "empty_result_state": + "description": |- + Alert state if result is empty. Please avoid setting this field to be `UNKNOWN` because `UNKNOWN` state is planned to be deprecated. + "last_evaluated_at": + "description": |- + Timestamp of the last evaluation. + "x-databricks-field-behaviors_output_only": |- + true + "notification": + "description": |- + User or Notification Destination to notify when alert is triggered. + "source": + "description": |- + Source column from result to use to evaluate alert + "state": + "description": |- + Latest state of alert evaluation. + "x-databricks-field-behaviors_output_only": |- + true + "threshold": + "description": |- + Threshold to user for alert evaluation, can be a column or a value. +github.com/databricks/databricks-sdk-go/service/sql.AlertV2Notification: + "notify_on_ok": + "description": |- + Whether to notify alert subscribers when alert returns back to normal. + "retrigger_seconds": + "description": |- + Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. 
If 0 or not specified, the alert will not be triggered again. + "subscriptions": {} +github.com/databricks/databricks-sdk-go/service/sql.AlertV2Operand: + "column": {} + "value": {} +github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn: + "aggregation": + "description": |- + If not set, the behavior is equivalent to using `First row` in the UI. + "display": {} + "name": {} +github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandValue: + "bool_value": {} + "double_value": {} + "string_value": {} +github.com/databricks/databricks-sdk-go/service/sql.AlertV2RunAs: + "service_principal_name": + "description": |- + Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role. + "user_name": + "description": |- + The email of an active workspace user. Can only set this field to their own email. +github.com/databricks/databricks-sdk-go/service/sql.AlertV2Subscription: + "destination_id": {} + "user_email": {} github.com/databricks/databricks-sdk-go/service/sql.Channel: "_": "description": |- @@ -4836,6 +5027,25 @@ github.com/databricks/databricks-sdk-go/service/sql.ChannelName: CHANNEL_NAME_PREVIOUS - |- CHANNEL_NAME_CUSTOM +github.com/databricks/databricks-sdk-go/service/sql.ComparisonOperator: + "_": + "enum": + - |- + LESS_THAN + - |- + GREATER_THAN + - |- + EQUAL + - |- + NOT_EQUAL + - |- + GREATER_THAN_OR_EQUAL + - |- + LESS_THAN_OR_EQUAL + - |- + IS_NULL + - |- + IS_NOT_NULL github.com/databricks/databricks-sdk-go/service/sql.CreateWarehouseRequestWarehouseType: "_": "enum": @@ -4845,11 +5055,31 @@ github.com/databricks/databricks-sdk-go/service/sql.CreateWarehouseRequestWareho CLASSIC - |- PRO +github.com/databricks/databricks-sdk-go/service/sql.CronSchedule: + "pause_status": + "description": |- + Indicate whether this schedule is paused or not. + "quartz_cron_schedule": + "description": |- + A cron expression using quartz syntax that specifies the schedule for this pipeline. 
+ Should use the quartz format described here: http://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/tutorial-lesson-06.html + "timezone_id": + "description": |- + A Java timezone id. The schedule will be resolved using this timezone. + This will be combined with the quartz_cron_schedule to determine the schedule. + See https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details. github.com/databricks/databricks-sdk-go/service/sql.EndpointTagPair: "key": {} "value": {} github.com/databricks/databricks-sdk-go/service/sql.EndpointTags: "custom_tags": {} +github.com/databricks/databricks-sdk-go/service/sql.SchedulePauseStatus: + "_": + "enum": + - |- + UNPAUSED + - |- + PAUSED github.com/databricks/databricks-sdk-go/service/sql.SpotInstancePolicy: "_": "description": |- diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml index 9778fa922f..427c184570 100644 --- a/bundle/internal/schema/annotations_openapi_overrides.yml +++ b/bundle/internal/schema/annotations_openapi_overrides.yml @@ -1,3 +1,27 @@ +github.com/databricks/cli/bundle/config/resources.Alert: + "evaluation": + "description": |- + PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER + "permissions": + "description": |- + PLACEHOLDER + "schedule": + "description": |- + PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.AlertPermissionLevel: + "_": + "enum": + - |- + CAN_EDIT + - |- + CAN_MANAGE + - |- + CAN_READ + - |- + CAN_RUN github.com/databricks/cli/bundle/config/resources.App: "app_status": "description": |- @@ -465,12 +489,39 @@ github.com/databricks/cli/bundle/config/resources.RegisteredModel: - EXECUTE principal: account users ``` + "aliases": + "description": |- + PLACEHOLDER + "browse_only": + "description": |- + PLACEHOLDER + "created_at": + "description": |- + PLACEHOLDER + "created_by": + "description": |- + PLACEHOLDER + "full_name": + 
"description": |- + PLACEHOLDER "grants": "description": |- PLACEHOLDER "lifecycle": "description": |- Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. + "metastore_id": + "description": |- + PLACEHOLDER + "owner": + "description": |- + PLACEHOLDER + "updated_at": + "description": |- + PLACEHOLDER + "updated_by": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.Schema: "_": "markdown_description": |- @@ -776,6 +827,19 @@ github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries: "granularities": "description": |- Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year. +github.com/databricks/databricks-sdk-go/service/catalog.RegisteredModelAlias: + "catalog_name": + "description": |- + PLACEHOLDER + "id": + "description": |- + PLACEHOLDER + "model_name": + "description": |- + PLACEHOLDER + "schema_name": + "description": |- + PLACEHOLDER github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes: "availability": "description": |- @@ -811,6 +875,9 @@ github.com/databricks/databricks-sdk-go/service/compute.Environment: "dependencies": "description": |- List of pip dependencies, as supported by the version of pip in this environment. 
+ "java_dependencies": + "description": |- + PLACEHOLDER github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes: "availability": "description": |- @@ -908,6 +975,10 @@ github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger: "timezone_id": "description": |- PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition: + "netsuite_jar_path": + "description": |- + PLACEHOLDER github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary: "whl": "deprecation_message": |- @@ -919,6 +990,10 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger: "manual": "description": |- PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig: + "workday_report_parameters": + "description": |- + PLACEHOLDER github.com/databricks/databricks-sdk-go/service/serving.Route: "served_entity_name": "description": |- @@ -934,6 +1009,44 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput: "model_version": "description": |- PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/sql.AlertV2Notification: + "subscriptions": + "description": |- + PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/sql.AlertV2Operand: + "column": + "description": |- + PLACEHOLDER + "value": + "description": |- + PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn: + "aggregation": + "description": |- + PLACEHOLDER + "display": + "description": |- + PLACEHOLDER + "name": + "description": |- + PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandValue: + "bool_value": + "description": |- + PLACEHOLDER + "double_value": + "description": |- + PLACEHOLDER + "string_value": + "description": |- + PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/sql.AlertV2Subscription: + "destination_id": + "description": |- + PLACEHOLDER + "user_email": + "description": |- + PLACEHOLDER 
github.com/databricks/databricks-sdk-go/service/sql.Channel: "dbsql_version": "description": |- diff --git a/bundle/internal/validation/generated/enum_fields.go b/bundle/internal/validation/generated/enum_fields.go index f10be54bbd..340032f888 100644 --- a/bundle/internal/validation/generated/enum_fields.go +++ b/bundle/internal/validation/generated/enum_fields.go @@ -8,6 +8,14 @@ var EnumFields = map[string][]string{ "artifacts.*.executable": {"bash", "sh", "cmd"}, "artifacts.*.type": {"whl", "jar"}, + "resources.alerts.*.evaluation.comparison_operator": {"EQUAL", "GREATER_THAN", "GREATER_THAN_OR_EQUAL", "IS_NOT_NULL", "IS_NULL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "NOT_EQUAL"}, + "resources.alerts.*.evaluation.empty_result_state": {"ERROR", "OK", "TRIGGERED", "UNKNOWN"}, + "resources.alerts.*.evaluation.source.aggregation": {"AVG", "COUNT", "COUNT_DISTINCT", "MAX", "MEDIAN", "MIN", "STDDEV", "SUM"}, + "resources.alerts.*.evaluation.state": {"ERROR", "OK", "TRIGGERED", "UNKNOWN"}, + "resources.alerts.*.evaluation.threshold.column.aggregation": {"AVG", "COUNT", "COUNT_DISTINCT", "MAX", "MEDIAN", "MIN", "STDDEV", "SUM"}, + "resources.alerts.*.lifecycle_state": {"ACTIVE", "DELETED"}, + "resources.alerts.*.schedule.pause_status": {"PAUSED", "UNPAUSED"}, + "resources.apps.*.active_deployment.mode": {"AUTO_SYNC", "SNAPSHOT"}, "resources.apps.*.active_deployment.status.state": {"CANCELLED", "FAILED", "IN_PROGRESS", "SUCCEEDED"}, "resources.apps.*.app_status.state": {"CRASHED", "DEPLOYING", "RUNNING", "UNAVAILABLE"}, diff --git a/bundle/internal/validation/generated/required_fields.go b/bundle/internal/validation/generated/required_fields.go index dcd81254be..2cc2045b9a 100644 --- a/bundle/internal/validation/generated/required_fields.go +++ b/bundle/internal/validation/generated/required_fields.go @@ -11,6 +11,13 @@ var RequiredFields = map[string][]string{ "permissions[*]": {"level"}, + "resources.alerts.*": {"display_name", "evaluation", "query_text", "schedule", 
"warehouse_id"}, + "resources.alerts.*.evaluation": {"comparison_operator", "source"}, + "resources.alerts.*.evaluation.source": {"name"}, + "resources.alerts.*.evaluation.threshold.column": {"name"}, + "resources.alerts.*.permissions[*]": {"level"}, + "resources.alerts.*.schedule": {"quartz_cron_schedule", "timezone_id"}, + "resources.apps.*": {"name", "source_code_path"}, "resources.apps.*.permissions[*]": {"level"}, "resources.apps.*.resources[*]": {"name"}, diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 433d436dd1..023f60407a 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -59,6 +59,111 @@ "cli": { "bundle": { "config": { + "resources.Alert": { + "oneOf": [ + { + "type": "object", + "properties": { + "custom_description": { + "$ref": "#/$defs/string" + }, + "custom_summary": { + "$ref": "#/$defs/string" + }, + "display_name": { + "$ref": "#/$defs/string" + }, + "evaluation": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Evaluation" + }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, + "parent_path": { + "$ref": "#/$defs/string" + }, + "permissions": { + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.AlertPermission" + }, + "query_text": { + "$ref": "#/$defs/string" + }, + "run_as": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2RunAs" + }, + "run_as_user_name": { + "$ref": "#/$defs/string", + "deprecationMessage": "This field is deprecated", + "deprecated": true + }, + "schedule": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.CronSchedule" + }, + "warehouse_id": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "display_name", + "evaluation", + "query_text", + "schedule", + "warehouse_id" + ] + }, + { + "type": "string", + "pattern": 
"\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "resources.AlertPermission": { + "oneOf": [ + { + "type": "object", + "properties": { + "group_name": { + "$ref": "#/$defs/string" + }, + "level": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AlertPermissionLevel" + }, + "service_principal_name": { + "$ref": "#/$defs/string" + }, + "user_name": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "level" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "resources.AlertPermissionLevel": { + "oneOf": [ + { + "type": "string", + "enum": [ + "CAN_EDIT", + "CAN_MANAGE", + "CAN_READ", + "CAN_RUN" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.App": { "oneOf": [ { @@ -1394,11 +1499,9 @@ "type": "object", "properties": { "aliases": { - "description": "List of aliases associated with the registered model", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/catalog.RegisteredModelAlias" }, "browse_only": { - "description": "Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.", "$ref": "#/$defs/bool" }, "catalog_name": { @@ -1410,15 +1513,12 @@ "$ref": "#/$defs/string" }, "created_at": { - "description": "Creation timestamp of the registered model in milliseconds since the Unix epoch", "$ref": "#/$defs/int64" }, "created_by": { - "description": "The identifier of the user who created the registered model", "$ref": "#/$defs/string" }, "full_name": { - "description": "The three-level (fully qualified) name of the registered model", "$ref": "#/$defs/string" }, "grants": { @@ -1429,7 +1529,6 @@ "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "metastore_id": 
{ - "description": "The unique identifier of the metastore", "$ref": "#/$defs/string" }, "name": { @@ -1437,7 +1536,6 @@ "$ref": "#/$defs/string" }, "owner": { - "description": "The identifier of the user who owns the registered model", "$ref": "#/$defs/string" }, "schema_name": { @@ -1449,11 +1547,9 @@ "$ref": "#/$defs/string" }, "updated_at": { - "description": "Last-update timestamp of the registered model in milliseconds since the Unix epoch", "$ref": "#/$defs/int64" }, "updated_by": { - "description": "The identifier of the user who updated the registered model last time", "$ref": "#/$defs/string" } }, @@ -2332,6 +2428,9 @@ { "type": "object", "properties": { + "alerts": { + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Alert" + }, "apps": { "description": "The app resource defines a Databricks app.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.App", @@ -3102,7 +3201,8 @@ "type": "string", "enum": [ "MEDIUM", - "LARGE" + "LARGE", + "LIQUID" ] }, { @@ -3424,19 +3524,15 @@ "$ref": "#/$defs/string" }, "catalog_name": { - "description": "The name of the catalog containing the model version", "$ref": "#/$defs/string" }, "id": { - "description": "The unique identifier of the alias", "$ref": "#/$defs/string" }, "model_name": { - "description": "The name of the parent registered model of the model version, relative to parent schema", "$ref": "#/$defs/string" }, "schema_name": { - "description": "The name of the schema containing the model version, relative to parent catalog", "$ref": "#/$defs/string" }, "version_num": { @@ -3941,7 +4037,6 @@ "$ref": "#/$defs/string" }, "java_dependencies": { - "description": "List of java dependencies. Each dependency is a string representing a java library path. For example: `/Volumes/path/to/test.jar`.", "$ref": "#/$defs/slice/string" } }, @@ -7064,7 +7159,6 @@ "$ref": "#/$defs/string" }, "netsuite_jar_path": { - "description": "Netsuite only configuration. 
When the field is set for a netsuite connector,\nthe jar stored in the field will be validated and added to the classpath of\npipeline's cluster.", "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true @@ -7182,6 +7276,8 @@ "enum": [ "MYSQL", "POSTGRESQL", + "REDSHIFT", + "SQLDW", "SQLSERVER", "SALESFORCE", "BIGQUERY", @@ -7194,6 +7290,15 @@ "TERADATA", "SHAREPOINT", "DYNAMICS365", + "CONFLUENCE", + "META_MARKETING", + "GOOGLE_ADS", + "TIKTOK_ADS", + "SALESFORCE_MARKETING_CLOUD", + "HUBSPOT", + "WORKDAY_HCM", + "GUIDEWIRE", + "ZENDESK", "FOREIGN_CATALOG" ] }, @@ -7817,7 +7922,6 @@ "$ref": "#/$defs/slice/string" }, "workday_report_parameters": { - "description": "(Optional) Additional custom parameters for Workday Report", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParameters", "x-databricks-preview": "PRIVATE", "doNotSuggest": true @@ -8944,6 +9048,234 @@ } ] }, + "sql.Aggregation": { + "oneOf": [ + { + "type": "string", + "enum": [ + "SUM", + "COUNT", + "COUNT_DISTINCT", + "AVG", + "MEDIAN", + "MIN", + "MAX", + "STDDEV" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "sql.AlertEvaluationState": { + "oneOf": [ + { + "type": "string", + "description": "UNSPECIFIED - default unspecify value for proto enum, do not use it in the code\nUNKNOWN - alert not yet evaluated\nTRIGGERED - alert is triggered\nOK - alert is not triggered\nERROR - alert evaluation failed", + "enum": [ + "UNKNOWN", + "TRIGGERED", + "OK", + "ERROR" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "sql.AlertLifecycleState": { + "oneOf": [ + { + "type": "string", + "enum": [ + "ACTIVE", + "DELETED" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + 
"sql.AlertV2Evaluation": { + "oneOf": [ + { + "type": "object", + "properties": { + "comparison_operator": { + "description": "Operator used for comparison in alert evaluation.", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.ComparisonOperator" + }, + "empty_result_state": { + "description": "Alert state if result is empty. Please avoid setting this field to be `UNKNOWN` because `UNKNOWN` state is planned to be deprecated.", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertEvaluationState" + }, + "notification": { + "description": "User or Notification Destination to notify when alert is triggered.", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Notification" + }, + "source": { + "description": "Source column from result to use to evaluate alert", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn" + }, + "threshold": { + "description": "Threshold to user for alert evaluation, can be a column or a value.", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Operand" + } + }, + "additionalProperties": false, + "required": [ + "comparison_operator", + "source" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "sql.AlertV2Notification": { + "oneOf": [ + { + "type": "object", + "properties": { + "notify_on_ok": { + "description": "Whether to notify alert subscribers when alert returns back to normal.", + "$ref": "#/$defs/bool" + }, + "retrigger_seconds": { + "description": "Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. 
If 0 or not specified, the alert will not be triggered again.", + "$ref": "#/$defs/int" + }, + "subscriptions": { + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Subscription" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "sql.AlertV2Operand": { + "oneOf": [ + { + "type": "object", + "properties": { + "column": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn" + }, + "value": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandValue" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "sql.AlertV2OperandColumn": { + "oneOf": [ + { + "type": "object", + "properties": { + "aggregation": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.Aggregation" + }, + "display": { + "$ref": "#/$defs/string" + }, + "name": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "sql.AlertV2OperandValue": { + "oneOf": [ + { + "type": "object", + "properties": { + "bool_value": { + "$ref": "#/$defs/bool" + }, + "double_value": { + "$ref": "#/$defs/float64" + }, + "string_value": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "sql.AlertV2RunAs": { + "oneOf": [ + { + "type": "object", + "properties": { + "service_principal_name": { + "description": "Application ID of an active service principal. 
Setting this field requires the `servicePrincipal/user` role.", + "$ref": "#/$defs/string" + }, + "user_name": { + "description": "The email of an active workspace user. Can only set this field to their own email.", + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "sql.AlertV2Subscription": { + "oneOf": [ + { + "type": "object", + "properties": { + "destination_id": { + "$ref": "#/$defs/string" + }, + "user_email": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "sql.Channel": { "oneOf": [ { @@ -8982,6 +9314,27 @@ } ] }, + "sql.ComparisonOperator": { + "oneOf": [ + { + "type": "string", + "enum": [ + "LESS_THAN", + "GREATER_THAN", + "EQUAL", + "NOT_EQUAL", + "GREATER_THAN_OR_EQUAL", + "LESS_THAN_OR_EQUAL", + "IS_NULL", + "IS_NOT_NULL" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "sql.CreateWarehouseRequestWarehouseType": { "oneOf": [ { @@ -8998,6 +9351,36 @@ } ] }, + "sql.CronSchedule": { + "oneOf": [ + { + "type": "object", + "properties": { + "pause_status": { + "description": "Indicate whether this schedule is paused or not.", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.SchedulePauseStatus" + }, + "quartz_cron_schedule": { + "description": "A cron expression using quartz syntax that specifies the schedule for this pipeline.\nShould use the quartz format described here: http://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/tutorial-lesson-06.html", + "$ref": "#/$defs/string" + }, + "timezone_id": { + "description": "A Java timezone id. 
The schedule will be resolved using this timezone.\nThis will be combined with the quartz_cron_schedule to determine the schedule.\nSee https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.", + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "quartz_cron_schedule", + "timezone_id" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "sql.EndpointTagPair": { "oneOf": [ { @@ -9035,6 +9418,21 @@ } ] }, + "sql.SchedulePauseStatus": { + "oneOf": [ + { + "type": "string", + "enum": [ + "UNPAUSED", + "PAUSED" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "sql.SpotInstancePolicy": { "oneOf": [ { @@ -9160,6 +9558,20 @@ "cli": { "bundle": { "config": { + "resources.Alert": { + "oneOf": [ + { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Alert" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.App": { "oneOf": [ { @@ -9508,6 +9920,20 @@ "cli": { "bundle": { "config": { + "resources.AlertPermission": { + "oneOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AlertPermission" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.AppPermission": { "oneOf": [ { @@ -10213,6 +10639,20 @@ } ] }, + "sql.AlertV2Subscription": { + "oneOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Subscription" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "sql.EndpointTagPair": { "oneOf": [ { diff --git 
a/bundle/statemgmt/state_load_test.go b/bundle/statemgmt/state_load_test.go index 09aaa0b22a..e57cbdb443 100644 --- a/bundle/statemgmt/state_load_test.go +++ b/bundle/statemgmt/state_load_test.go @@ -43,7 +43,7 @@ func TestStateToBundleEmptyLocalResources(t *testing.T) { "resources.database_instances.test_database_instance": {ID: "1"}, "resources.database_catalogs.test_database_catalog": {ID: "1"}, "resources.synced_database_tables.test_synced_database_table": {ID: "1"}, - // "resources.alerts.test_alert": {ID: "1"}, + "resources.alerts.test_alert": {ID: "1"}, } err := StateToBundle(context.Background(), state, &config) assert.NoError(t, err) @@ -94,8 +94,8 @@ func TestStateToBundleEmptyLocalResources(t *testing.T) { assert.Equal(t, "1", config.Resources.DatabaseInstances["test_database_instance"].ID) assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.DatabaseInstances["test_database_instance"].ModifiedStatus) - // assert.Equal(t, "1", config.Resources.Alerts["test_alert"].ID) - // assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Alerts["test_alert"].ModifiedStatus) + assert.Equal(t, "1", config.Resources.Alerts["test_alert"].ID) + assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Alerts["test_alert"].ModifiedStatus) AssertFullResourceCoverage(t, &config) } @@ -222,13 +222,13 @@ func TestStateToBundleEmptyRemoteResources(t *testing.T) { }, }, }, - // Alerts: map[string]*resources.Alert{ - // "test_alert": { - // AlertV2: sql.AlertV2{ - // DisplayName: "test_alert", - // }, - // }, - // }, + Alerts: map[string]*resources.Alert{ + "test_alert": { + AlertV2: sql.AlertV2{ + DisplayName: "test_alert", + }, + }, + }, }, } @@ -286,8 +286,8 @@ func TestStateToBundleEmptyRemoteResources(t *testing.T) { assert.Equal(t, "", config.Resources.SyncedDatabaseTables["test_synced_database_table"].ID) assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.SyncedDatabaseTables["test_synced_database_table"].ModifiedStatus) - 
// assert.Equal(t, "", config.Resources.Alerts["test_alert"].ID) - // assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Alerts["test_alert"].ModifiedStatus) + assert.Equal(t, "", config.Resources.Alerts["test_alert"].ID) + assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Alerts["test_alert"].ModifiedStatus) AssertFullResourceCoverage(t, &config) } @@ -499,18 +499,18 @@ func TestStateToBundleModifiedResources(t *testing.T) { }, }, }, - // Alerts: map[string]*resources.Alert{ - // "test_alert": { - // AlertV2: sql.AlertV2{ - // DisplayName: "test_alert", - // }, - // }, - // "test_alert_new": { - // AlertV2: sql.AlertV2{ - // DisplayName: "test_alert_new", - // }, - // }, - // }, + Alerts: map[string]*resources.Alert{ + "test_alert": { + AlertV2: sql.AlertV2{ + DisplayName: "test_alert", + }, + }, + "test_alert_new": { + AlertV2: sql.AlertV2{ + DisplayName: "test_alert_new", + }, + }, + }, }, } state := ExportedResourcesMap{ @@ -544,8 +544,8 @@ func TestStateToBundleModifiedResources(t *testing.T) { "resources.sql_warehouses.test_sql_warehouse_old": {ID: "2"}, "resources.database_instances.test_database_instance": {ID: "1"}, "resources.database_instances.test_database_instance_old": {ID: "2"}, - // "resources.alerts.test_alert": {ID: "1"}, - // "resources.alerts.test_alert_old": {ID: "2"}, + "resources.alerts.test_alert": {ID: "1"}, + "resources.alerts.test_alert_old": {ID: "2"}, } err := StateToBundle(context.Background(), state, &config) assert.NoError(t, err) @@ -657,12 +657,12 @@ func TestStateToBundleModifiedResources(t *testing.T) { assert.Equal(t, "", config.Resources.DatabaseInstances["test_database_instance_new"].ID) assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.DatabaseInstances["test_database_instance_new"].ModifiedStatus) - // assert.Equal(t, "1", config.Resources.Alerts["test_alert"].ID) - // assert.Equal(t, "", config.Resources.Alerts["test_alert"].ModifiedStatus) - // assert.Equal(t, "2", 
config.Resources.Alerts["test_alert_old"].ID) - // assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Alerts["test_alert_old"].ModifiedStatus) - // assert.Equal(t, "", config.Resources.Alerts["test_alert_new"].ID) - // assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Alerts["test_alert_new"].ModifiedStatus) + assert.Equal(t, "1", config.Resources.Alerts["test_alert"].ID) + assert.Equal(t, "", config.Resources.Alerts["test_alert"].ModifiedStatus) + assert.Equal(t, "2", config.Resources.Alerts["test_alert_old"].ID) + assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Alerts["test_alert_old"].ModifiedStatus) + assert.Equal(t, "", config.Resources.Alerts["test_alert_new"].ID) + assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Alerts["test_alert_new"].ModifiedStatus) AssertFullResourceCoverage(t, &config) } diff --git a/libs/structs/structwalk/walktype_test.go b/libs/structs/structwalk/walktype_test.go index fded9f8055..0ad985cf71 100644 --- a/libs/structs/structwalk/walktype_test.go +++ b/libs/structs/structwalk/walktype_test.go @@ -136,7 +136,7 @@ func TestTypeJobSettings(t *testing.T) { func TestTypeRoot(t *testing.T) { testStruct(t, reflect.TypeOf(config.Root{}), - 4000, 4300, // 4003 at the time of the update + 4300, 4700, // 4322 at the time of the update map[string]any{ "bundle.target": "", `variables.*.lookup.dashboard`: "", diff --git a/libs/testserver/alerts.go b/libs/testserver/alerts.go index 7efd4be648..a40090f1e7 100644 --- a/libs/testserver/alerts.go +++ b/libs/testserver/alerts.go @@ -40,3 +40,21 @@ func (s *FakeWorkspace) AlertsUpsert(req Request, alertId string) Response { Body: alert, } } + +func (s *FakeWorkspace) AlertsDelete(alertId string) Response { + defer s.LockUnlock()() + + alert, ok := s.Alerts[alertId] + if !ok { + return Response{ + StatusCode: 404, + } + } + + alert.LifecycleState = sql.AlertLifecycleStateDeleted + s.Alerts[alertId] = alert + + return Response{ + StatusCode: 
200, + } +} diff --git a/libs/testserver/handlers.go b/libs/testserver/handlers.go index 56ff7616d4..dbb59a048b 100644 --- a/libs/testserver/handlers.go +++ b/libs/testserver/handlers.go @@ -458,7 +458,7 @@ func AddDefaultHandlers(server *Server) { }) server.Handle("DELETE", "/api/2.0/alerts/{id}", func(req Request) any { - return MapDelete(req.Workspace, req.Workspace.Alerts, req.Vars["id"]) + return req.Workspace.AlertsDelete(req.Vars["id"]) }) // Secrets ACLs: diff --git a/libs/testserver/permissions.go b/libs/testserver/permissions.go index 722b7bdc00..12483429af 100644 --- a/libs/testserver/permissions.go +++ b/libs/testserver/permissions.go @@ -206,6 +206,30 @@ func (s *FakeWorkspace) SetPermissions(req Request) any { }) } + // Add default ACLs for alertsv2 to match cloud environment + if requestObjectType == "alertsv2" { + existingPermissions.AccessControlList = append(existingPermissions.AccessControlList, iam.AccessControlResponse{ + AllPermissions: []iam.Permission{ + { + Inherited: true, + InheritedFromObject: []string{"/directories/4454031293888593"}, + PermissionLevel: "CAN_MANAGE", + }, + }, + UserName: "shreyas.goenka@databricks.com", + DisplayName: "shreyas.goenka@databricks.com", + }, iam.AccessControlResponse{ + AllPermissions: []iam.Permission{ + { + Inherited: true, + InheritedFromObject: []string{"/directories/"}, + PermissionLevel: "CAN_MANAGE", + }, + }, + GroupName: "admins", + }) + } + // Validate job ownership requirements if requestObjectType == "jobs" { hasOwner := false diff --git a/python/databricks/bundles/jobs/_models/environment.py b/python/databricks/bundles/jobs/_models/environment.py index 8beefedfb6..b912693ef2 100644 --- a/python/databricks/bundles/jobs/_models/environment.py +++ b/python/databricks/bundles/jobs/_models/environment.py @@ -34,9 +34,6 @@ class Environment: """ java_dependencies: VariableOrList[str] = field(default_factory=list) - """ - List of java dependencies. 
Each dependency is a string representing a java library path. For example: `/Volumes/path/to/test.jar`. - """ @classmethod def from_dict(cls, value: "EnvironmentDict") -> "Self": @@ -67,9 +64,6 @@ class EnvironmentDict(TypedDict, total=False): """ java_dependencies: VariableOrList[str] - """ - List of java dependencies. Each dependency is a string representing a java library path. For example: `/Volumes/path/to/test.jar`. - """ EnvironmentParam = EnvironmentDict | Environment diff --git a/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py b/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py index 9276bc0419..dc3b447396 100644 --- a/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +++ b/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py @@ -48,10 +48,6 @@ class IngestionPipelineDefinition: netsuite_jar_path: VariableOrOptional[str] = None """ :meta private: [EXPERIMENTAL] - - Netsuite only configuration. When the field is set for a netsuite connector, - the jar stored in the field will be validated and added to the classpath of - pipeline's cluster. """ objects: VariableOrList[IngestionConfig] = field(default_factory=list) @@ -103,10 +99,6 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False): netsuite_jar_path: VariableOrOptional[str] """ :meta private: [EXPERIMENTAL] - - Netsuite only configuration. When the field is set for a netsuite connector, - the jar stored in the field will be validated and added to the classpath of - pipeline's cluster. 
""" objects: VariableOrList[IngestionConfigParam] diff --git a/python/databricks/bundles/pipelines/_models/table_specific_config.py b/python/databricks/bundles/pipelines/_models/table_specific_config.py index 0b7c6b1a69..502d170878 100644 --- a/python/databricks/bundles/pipelines/_models/table_specific_config.py +++ b/python/databricks/bundles/pipelines/_models/table_specific_config.py @@ -80,8 +80,6 @@ class TableSpecificConfig: ] = None """ :meta private: [EXPERIMENTAL] - - (Optional) Additional custom parameters for Workday Report """ @classmethod @@ -150,8 +148,6 @@ class TableSpecificConfigDict(TypedDict, total=False): ] """ :meta private: [EXPERIMENTAL] - - (Optional) Additional custom parameters for Workday Report """