diff --git a/experimental/python/databricks/bundles/compute/_models/cluster_log_conf.py b/experimental/python/databricks/bundles/compute/_models/cluster_log_conf.py index f58356ae98..6abf15e7c5 100644 --- a/experimental/python/databricks/bundles/compute/_models/cluster_log_conf.py +++ b/experimental/python/databricks/bundles/compute/_models/cluster_log_conf.py @@ -9,6 +9,10 @@ S3StorageInfo, S3StorageInfoParam, ) +from databricks.bundles.compute._models.volumes_storage_info import ( + VolumesStorageInfo, + VolumesStorageInfoParam, +) from databricks.bundles.core._transform import _transform from databricks.bundles.core._transform_to_json import _transform_to_json_value from databricks.bundles.core._variable import VariableOrOptional @@ -35,6 +39,12 @@ class ClusterLogConf: `instance_profile_arn` has permission to write data to the s3 destination. """ + volumes: VariableOrOptional[VolumesStorageInfo] = None + """ + destination needs to be provided. e.g. + `{ "volumes" : { "destination" : "/Volumes/catalog/schema/volume/cluster_log" } }` + """ + @classmethod def from_dict(cls, value: "ClusterLogConfDict") -> "Self": return _transform(cls, value) @@ -60,5 +70,11 @@ class ClusterLogConfDict(TypedDict, total=False): `instance_profile_arn` has permission to write data to the s3 destination. """ + volumes: VariableOrOptional[VolumesStorageInfoParam] + """ + destination needs to be provided. e.g. + `{ "volumes" : { "destination" : "/Volumes/catalog/schema/volume/cluster_log" } }` + """ + ClusterLogConfParam = ClusterLogConfDict | ClusterLogConf diff --git a/experimental/python/databricks/bundles/compute/_models/cluster_spec.py b/experimental/python/databricks/bundles/compute/_models/cluster_spec.py index 6f12dc3484..9b37322933 100644 --- a/experimental/python/databricks/bundles/compute/_models/cluster_spec.py +++ b/experimental/python/databricks/bundles/compute/_models/cluster_spec.py @@ -91,7 +91,7 @@ class ClusterSpec: cluster_log_conf: VariableOrOptional[ClusterLogConf] = None """ The configuration for delivering spark logs to a long-term storage destination. - Two kinds of destinations (dbfs and s3) are supported. Only one destination can be specified + Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`. @@ -163,7 +163,7 @@ class ClusterSpec: is_single_node: VariableOrOptional[bool] = None """ - This field can only be used with `kind`. + This field can only be used when `kind = CLASSIC_PREVIEW`. When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` @@ -242,7 +242,7 @@ class ClusterSpec: use_ml_runtime: VariableOrOptional[bool] = None """ - This field can only be used with `kind`. + This field can only be used when `kind = CLASSIC_PREVIEW`. `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. @@ -295,7 +295,7 @@ class ClusterSpecDict(TypedDict, total=False): cluster_log_conf: VariableOrOptional[ClusterLogConfParam] """ The configuration for delivering spark logs to a long-term storage destination. - Two kinds of destinations (dbfs and s3) are supported. 
Only one destination can be specified + Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`. @@ -367,7 +367,7 @@ class ClusterSpecDict(TypedDict, total=False): is_single_node: VariableOrOptional[bool] """ - This field can only be used with `kind`. + This field can only be used when `kind = CLASSIC_PREVIEW`. When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` @@ -446,7 +446,7 @@ class ClusterSpecDict(TypedDict, total=False): use_ml_runtime: VariableOrOptional[bool] """ - This field can only be used with `kind`. + This field can only be used when `kind = CLASSIC_PREVIEW`. `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. diff --git a/experimental/python/databricks/bundles/compute/_models/data_security_mode.py b/experimental/python/databricks/bundles/compute/_models/data_security_mode.py index 2fdeb66bb9..6f642a13ba 100644 --- a/experimental/python/databricks/bundles/compute/_models/data_security_mode.py +++ b/experimental/python/databricks/bundles/compute/_models/data_security_mode.py @@ -7,7 +7,7 @@ class DataSecurityMode(Enum): Data security mode decides what data governance model to use when accessing data from a cluster. - The following modes can only be used with `kind`. + The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. diff --git a/experimental/python/databricks/bundles/compute/_models/environment.py b/experimental/python/databricks/bundles/compute/_models/environment.py index cf2c654d6c..7cca7ae87c 100644 --- a/experimental/python/databricks/bundles/compute/_models/environment.py +++ b/experimental/python/databricks/bundles/compute/_models/environment.py @@ -27,9 +27,6 @@ class Environment: dependencies: VariableOrList[str] = field(default_factory=list) """ List of pip dependencies, as supported by the version of pip in this environment. - Each dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/ - Allowed dependency could be , , (WSFS or Volumes in Databricks), - E.g. dependencies: ["foo==0.0.1", "-r /Workspace/test/requirements.txt"] """ @classmethod @@ -54,9 +51,6 @@ class EnvironmentDict(TypedDict, total=False): dependencies: VariableOrList[str] """ List of pip dependencies, as supported by the version of pip in this environment. - Each dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/ - Allowed dependency could be , , (WSFS or Volumes in Databricks), - E.g. 
dependencies: ["foo==0.0.1", "-r /Workspace/test/requirements.txt"] """ diff --git a/experimental/python/databricks/bundles/compute/_models/init_script_info.py b/experimental/python/databricks/bundles/compute/_models/init_script_info.py index c00a04601b..47efb2e271 100644 --- a/experimental/python/databricks/bundles/compute/_models/init_script_info.py +++ b/experimental/python/databricks/bundles/compute/_models/init_script_info.py @@ -43,8 +43,7 @@ class InitScriptInfo: abfss: VariableOrOptional[Adlsgen2Info] = None """ - destination needs to be provided. e.g. - `{ "abfss" : { "destination" : "abfss://@.dfs.core.windows.net/" } }` + Contains the Azure Data Lake Storage destination path """ dbfs: VariableOrOptional[DbfsStorageInfo] = None @@ -98,8 +97,7 @@ class InitScriptInfoDict(TypedDict, total=False): abfss: VariableOrOptional[Adlsgen2InfoParam] """ - destination needs to be provided. e.g. - `{ "abfss" : { "destination" : "abfss://@.dfs.core.windows.net/" } } + Contains the Azure Data Lake Storage destination path """ dbfs: VariableOrOptional[DbfsStorageInfoParam] diff --git a/experimental/python/databricks/bundles/compute/_models/log_analytics_info.py b/experimental/python/databricks/bundles/compute/_models/log_analytics_info.py index 35033e171d..5eced870a1 100644 --- a/experimental/python/databricks/bundles/compute/_models/log_analytics_info.py +++ b/experimental/python/databricks/bundles/compute/_models/log_analytics_info.py @@ -15,12 +15,12 @@ class LogAnalyticsInfo: log_analytics_primary_key: VariableOrOptional[str] = None """ - + The primary key for the Azure Log Analytics agent configuration """ log_analytics_workspace_id: VariableOrOptional[str] = None """ - + The workspace ID for the Azure Log Analytics agent configuration """ @classmethod @@ -36,12 +36,12 @@ class LogAnalyticsInfoDict(TypedDict, total=False): log_analytics_primary_key: VariableOrOptional[str] """ - + The primary key for the Azure Log Analytics agent configuration """ log_analytics_workspace_id: VariableOrOptional[str] """ - + The workspace ID for the Azure Log Analytics agent configuration """ diff --git a/experimental/python/databricks/bundles/compute/_models/volumes_storage_info.py b/experimental/python/databricks/bundles/compute/_models/volumes_storage_info.py index 53b3914503..46cec7ab6c 100644 --- a/experimental/python/databricks/bundles/compute/_models/volumes_storage_info.py +++ b/experimental/python/databricks/bundles/compute/_models/volumes_storage_info.py @@ -15,7 +15,7 @@ class VolumesStorageInfo: destination: VariableOr[str] """ - Unity Catalog Volumes file destination, e.g. `/Volumes/my-init.sh` + Unity Catalog volumes file destination, e.g. `/Volumes/catalog/schema/volume/dir/file` """ @classmethod @@ -31,7 +31,7 @@ class VolumesStorageInfoDict(TypedDict, total=False): destination: VariableOr[str] """ - Unity Catalog Volumes file destination, e.g. `/Volumes/my-init.sh` + Unity Catalog volumes file destination, e.g. 
`/Volumes/catalog/schema/volume/dir/file` """ diff --git a/experimental/python/databricks/bundles/jobs/__init__.py b/experimental/python/databricks/bundles/jobs/__init__.py index 5c3eb0e736..5d7162b7ca 100644 --- a/experimental/python/databricks/bundles/jobs/__init__.py +++ b/experimental/python/databricks/bundles/jobs/__init__.py @@ -72,9 +72,6 @@ "GcsStorageInfoParam", "GitProvider", "GitProviderParam", - "GitSnapshot", - "GitSnapshotDict", - "GitSnapshotParam", "GitSource", "GitSourceDict", "GitSourceParam", @@ -129,6 +126,8 @@ "NotebookTaskParam", "PauseStatus", "PauseStatusParam", + "PerformanceTarget", + "PerformanceTargetParam", "PeriodicTriggerConfiguration", "PeriodicTriggerConfigurationDict", "PeriodicTriggerConfigurationParam", @@ -137,6 +136,9 @@ "Permission", "PermissionDict", "PermissionParam", + "PipelineParams", + "PipelineParamsDict", + "PipelineParamsParam", "PipelineTask", "PipelineTaskDict", "PipelineTaskParam", @@ -403,11 +405,6 @@ ForEachTaskParam, ) from databricks.bundles.jobs._models.git_provider import GitProvider, GitProviderParam -from databricks.bundles.jobs._models.git_snapshot import ( - GitSnapshot, - GitSnapshotDict, - GitSnapshotParam, -) from databricks.bundles.jobs._models.git_source import ( GitSource, GitSourceDict, @@ -468,6 +465,10 @@ NotebookTaskParam, ) from databricks.bundles.jobs._models.pause_status import PauseStatus, PauseStatusParam +from databricks.bundles.jobs._models.performance_target import ( + PerformanceTarget, + PerformanceTargetParam, +) from databricks.bundles.jobs._models.periodic_trigger_configuration import ( PeriodicTriggerConfiguration, PeriodicTriggerConfigurationDict, @@ -482,6 +483,11 @@ PermissionDict, PermissionParam, ) +from databricks.bundles.jobs._models.pipeline_params import ( + PipelineParams, + PipelineParamsDict, + PipelineParamsParam, +) from databricks.bundles.jobs._models.pipeline_task import ( PipelineTask, PipelineTaskDict, @@ -583,6 +589,7 @@ def _resolve_recursive_imports(): import typing from databricks.bundles.core._variable import VariableOr + from databricks.bundles.jobs._models.task import Task ForEachTask.__annotations__ = typing.get_type_hints( ForEachTask, diff --git a/experimental/python/databricks/bundles/jobs/_models/dbt_task.py b/experimental/python/databricks/bundles/jobs/_models/dbt_task.py index 290ff299af..52ae3cfa46 100644 --- a/experimental/python/databricks/bundles/jobs/_models/dbt_task.py +++ b/experimental/python/databricks/bundles/jobs/_models/dbt_task.py @@ -1,4 +1,4 @@ -from dataclasses import dataclass +from dataclasses import dataclass, field from typing import TYPE_CHECKING, TypedDict from databricks.bundles.core._transform import _transform @@ -14,14 +14,14 @@ class DbtTask: """""" - commands: VariableOrList[str] + catalog: VariableOrOptional[str] = None """ - A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided. + Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks >= 1.1.1. """ - catalog: VariableOrOptional[str] = None + commands: VariableOrList[str] = field(default_factory=list) """ - Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. 
Requires dbt-databricks >= 1.1.1. + A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided. """ profiles_directory: VariableOrOptional[str] = None @@ -66,14 +66,14 @@ def as_dict(self) -> "DbtTaskDict": class DbtTaskDict(TypedDict, total=False): """""" - commands: VariableOrList[str] + catalog: VariableOrOptional[str] """ - A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided. + Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks >= 1.1.1. """ - catalog: VariableOrOptional[str] + commands: VariableOrList[str] """ - Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks >= 1.1.1. + A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided. """ profiles_directory: VariableOrOptional[str] diff --git a/experimental/python/databricks/bundles/jobs/_models/git_snapshot.py b/experimental/python/databricks/bundles/jobs/_models/git_snapshot.py deleted file mode 100644 index 33a9c2e735..0000000000 --- a/experimental/python/databricks/bundles/jobs/_models/git_snapshot.py +++ /dev/null @@ -1,40 +0,0 @@ -from dataclasses import dataclass -from typing import TYPE_CHECKING, TypedDict - -from databricks.bundles.core._transform import _transform -from databricks.bundles.core._transform_to_json import _transform_to_json_value -from databricks.bundles.core._variable import VariableOrOptional - -if TYPE_CHECKING: - from typing_extensions import Self - - -@dataclass(kw_only=True) -class GitSnapshot: - """ - Read-only state of the remote repository at the time the job was run. This field is only included on job runs. - """ - - used_commit: VariableOrOptional[str] = None - """ - Commit that was used to execute the run. If git_branch was specified, this points to the HEAD of the branch at the time of the run; if git_tag was specified, this points to the commit the tag points to. - """ - - @classmethod - def from_dict(cls, value: "GitSnapshotDict") -> "Self": - return _transform(cls, value) - - def as_dict(self) -> "GitSnapshotDict": - return _transform_to_json_value(self) # type:ignore - - -class GitSnapshotDict(TypedDict, total=False): - """""" - - used_commit: VariableOrOptional[str] - """ - Commit that was used to execute the run. If git_branch was specified, this points to the HEAD of the branch at the time of the run; if git_tag was specified, this points to the commit the tag points to. 
- """ - - -GitSnapshotParam = GitSnapshotDict | GitSnapshot diff --git a/experimental/python/databricks/bundles/jobs/_models/job.py b/experimental/python/databricks/bundles/jobs/_models/job.py index 3332a664ae..3fa1496b17 100644 --- a/experimental/python/databricks/bundles/jobs/_models/job.py +++ b/experimental/python/databricks/bundles/jobs/_models/job.py @@ -17,7 +17,10 @@ CronSchedule, CronScheduleParam, ) -from databricks.bundles.jobs._models.git_source import GitSource, GitSourceParam +from databricks.bundles.jobs._models.git_source import ( + GitSource, + GitSourceParam, +) from databricks.bundles.jobs._models.job_cluster import JobCluster, JobClusterParam from databricks.bundles.jobs._models.job_email_notifications import ( JobEmailNotifications, @@ -40,6 +43,10 @@ JobsHealthRules, JobsHealthRulesParam, ) +from databricks.bundles.jobs._models.performance_target import ( + PerformanceTarget, + PerformanceTargetParam, +) from databricks.bundles.jobs._models.permission import Permission, PermissionParam from databricks.bundles.jobs._models.queue_settings import ( QueueSettings, @@ -107,6 +114,7 @@ class Job(Resource): job_clusters: VariableOrList[JobCluster] = field(default_factory=list) """ A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. + If more than 100 job clusters are available, you can paginate through them using :method:jobs/get. """ max_concurrent_runs: VariableOrOptional[int] = None @@ -134,6 +142,11 @@ class Job(Resource): Job-level parameter definitions """ + performance_target: VariableOrOptional[PerformanceTarget] = None + """ + PerformanceTarget defines how performant or cost efficient the execution of run on serverless should be. + """ + permissions: VariableOrList[Permission] = field(default_factory=list) queue: VariableOrOptional[QueueSettings] = None @@ -156,6 +169,7 @@ class Job(Resource): tasks: VariableOrList[Task] = field(default_factory=list) """ A list of task specifications to be executed by this job. + If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available. """ timeout_seconds: VariableOrOptional[int] = None @@ -228,6 +242,7 @@ class JobDict(TypedDict, total=False): job_clusters: VariableOrList[JobClusterParam] """ A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. + If more than 100 job clusters are available, you can paginate through them using :method:jobs/get. """ max_concurrent_runs: VariableOrOptional[int] @@ -255,6 +270,11 @@ class JobDict(TypedDict, total=False): Job-level parameter definitions """ + performance_target: VariableOrOptional[PerformanceTargetParam] + """ + PerformanceTarget defines how performant or cost efficient the execution of run on serverless should be. + """ + permissions: VariableOrList[PermissionParam] queue: VariableOrOptional[QueueSettingsParam] @@ -277,6 +297,7 @@ class JobDict(TypedDict, total=False): tasks: VariableOrList[TaskParam] """ A list of task specifications to be executed by this job. + If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available. 
""" timeout_seconds: VariableOrOptional[int] diff --git a/experimental/python/databricks/bundles/jobs/_models/job_run_as.py b/experimental/python/databricks/bundles/jobs/_models/job_run_as.py index 9187adb43d..ddf6a14522 100644 --- a/experimental/python/databricks/bundles/jobs/_models/job_run_as.py +++ b/experimental/python/databricks/bundles/jobs/_models/job_run_as.py @@ -19,7 +19,7 @@ class JobRunAs: service_principal_name: VariableOrOptional[str] = None """ - Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role. + The application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role. """ user_name: VariableOrOptional[str] = None @@ -51,7 +51,7 @@ class JobRunAsDict(TypedDict, total=False): service_principal_name: VariableOrOptional[str] """ - Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role. + The application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role. """ user_name: VariableOrOptional[str] diff --git a/experimental/python/databricks/bundles/jobs/_models/performance_target.py b/experimental/python/databricks/bundles/jobs/_models/performance_target.py new file mode 100644 index 0000000000..925e536386 --- /dev/null +++ b/experimental/python/databricks/bundles/jobs/_models/performance_target.py @@ -0,0 +1,18 @@ +from enum import Enum +from typing import Literal + + +class PerformanceTarget(Enum): + """ + PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run on serverless compute should be. + The performance mode on the job or pipeline should map to a performance setting that is passed to Cluster Manager + (see cluster-common PerformanceTarget). + """ + + PERFORMANCE_OPTIMIZED = "PERFORMANCE_OPTIMIZED" + COST_OPTIMIZED = "COST_OPTIMIZED" + + +PerformanceTargetParam = ( + Literal["PERFORMANCE_OPTIMIZED", "COST_OPTIMIZED"] | PerformanceTarget +) diff --git a/experimental/python/databricks/bundles/jobs/_models/pipeline_params.py b/experimental/python/databricks/bundles/jobs/_models/pipeline_params.py new file mode 100644 index 0000000000..0aa552e2e6 --- /dev/null +++ b/experimental/python/databricks/bundles/jobs/_models/pipeline_params.py @@ -0,0 +1,38 @@ +from dataclasses import dataclass +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import VariableOrOptional + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class PipelineParams: + """""" + + full_refresh: VariableOrOptional[bool] = None + """ + If true, triggers a full refresh on the delta live table. + """ + + @classmethod + def from_dict(cls, value: "PipelineParamsDict") -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "PipelineParamsDict": + return _transform_to_json_value(self) # type:ignore + + +class PipelineParamsDict(TypedDict, total=False): + """""" + + full_refresh: VariableOrOptional[bool] + """ + If true, triggers a full refresh on the delta live table. 
+ """ + + +PipelineParamsParam = PipelineParamsDict | PipelineParams diff --git a/experimental/python/databricks/bundles/jobs/_models/run_job_task.py b/experimental/python/databricks/bundles/jobs/_models/run_job_task.py index 6ac9e15f92..c7b88ee5ea 100644 --- a/experimental/python/databricks/bundles/jobs/_models/run_job_task.py +++ b/experimental/python/databricks/bundles/jobs/_models/run_job_task.py @@ -3,7 +3,15 @@ from databricks.bundles.core._transform import _transform from databricks.bundles.core._transform_to_json import _transform_to_json_value -from databricks.bundles.core._variable import VariableOr, VariableOrDict +from databricks.bundles.core._variable import ( + VariableOr, + VariableOrDict, + VariableOrOptional, +) +from databricks.bundles.jobs._models.pipeline_params import ( + PipelineParams, + PipelineParamsParam, +) if TYPE_CHECKING: from typing_extensions import Self @@ -23,6 +31,11 @@ class RunJobTask: Job-level parameters used to trigger the job. """ + pipeline_params: VariableOrOptional[PipelineParams] = None + """ + Controls whether the pipeline should perform a full refresh + """ + @classmethod def from_dict(cls, value: "RunJobTaskDict") -> "Self": return _transform(cls, value) @@ -44,5 +57,10 @@ class RunJobTaskDict(TypedDict, total=False): Job-level parameters used to trigger the job. """ + pipeline_params: VariableOrOptional[PipelineParamsParam] + """ + Controls whether the pipeline should perform a full refresh + """ + RunJobTaskParam = RunJobTaskDict | RunJobTask diff --git a/experimental/python/databricks/bundles/jobs/_models/sql_task_alert.py b/experimental/python/databricks/bundles/jobs/_models/sql_task_alert.py index 7229b02d16..513eecd710 100644 --- a/experimental/python/databricks/bundles/jobs/_models/sql_task_alert.py +++ b/experimental/python/databricks/bundles/jobs/_models/sql_task_alert.py @@ -1,4 +1,4 @@ -from dataclasses import dataclass +from dataclasses import dataclass, field from typing import TYPE_CHECKING, TypedDict from databricks.bundles.core._transform import _transform @@ -26,14 +26,14 @@ class SqlTaskAlert: The canonical identifier of the SQL alert. """ - subscriptions: VariableOrList[SqlTaskSubscription] + pause_subscriptions: VariableOrOptional[bool] = None """ - If specified, alert notifications are sent to subscribers. + If true, the alert notifications are not sent to subscribers. """ - pause_subscriptions: VariableOrOptional[bool] = None + subscriptions: VariableOrList[SqlTaskSubscription] = field(default_factory=list) """ - If true, the alert notifications are not sent to subscribers. + If specified, alert notifications are sent to subscribers. """ @classmethod @@ -52,14 +52,14 @@ class SqlTaskAlertDict(TypedDict, total=False): The canonical identifier of the SQL alert. """ - subscriptions: VariableOrList[SqlTaskSubscriptionParam] + pause_subscriptions: VariableOrOptional[bool] """ - If specified, alert notifications are sent to subscribers. + If true, the alert notifications are not sent to subscribers. """ - pause_subscriptions: VariableOrOptional[bool] + subscriptions: VariableOrList[SqlTaskSubscriptionParam] """ - If true, the alert notifications are not sent to subscribers. + If specified, alert notifications are sent to subscribers. """