diff --git a/bundle/docsgen/main.go b/bundle/docsgen/main.go
index 593f3e1dd1..9f585830d7 100644
--- a/bundle/docsgen/main.go
+++ b/bundle/docsgen/main.go
@@ -137,10 +137,6 @@ func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
 		s.Deprecated = true
 		s.DeprecationMessage = a.DeprecationMessage
 	}
-	if a.ForceNotDeprecated {
-		s.Deprecated = false
-		s.DeprecationMessage = ""
-	}
 	if a.Preview == "PRIVATE" {
 		s.DoNotSuggest = true
 		s.Preview = a.Preview
diff --git a/bundle/internal/annotation/descriptor.go b/bundle/internal/annotation/descriptor.go
index 562d992c4c..7b5dd607bb 100644
--- a/bundle/internal/annotation/descriptor.go
+++ b/bundle/internal/annotation/descriptor.go
@@ -9,9 +9,6 @@ type Descriptor struct {
 	MarkdownExamples   string `json:"markdown_examples,omitempty"`
 	DeprecationMessage string `json:"deprecation_message,omitempty"`
 	Preview            string `json:"x-databricks-preview,omitempty"`
-
-	// If true, takes priority over 'DeprecationMessage'
-	ForceNotDeprecated bool `json:"force_not_deprecated,omitempty"`
 }
 
 const Placeholder = "PLACEHOLDER"
diff --git a/bundle/internal/schema/annotations.go b/bundle/internal/schema/annotations.go
index 8f65368a74..a40c5c7c8e 100644
--- a/bundle/internal/schema/annotations.go
+++ b/bundle/internal/schema/annotations.go
@@ -138,11 +138,6 @@ func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
 		s.Preview = a.Preview
 	}
 
-	if a.ForceNotDeprecated {
-		s.Deprecated = false
-		s.DeprecationMessage = ""
-	}
-
 	s.MarkdownDescription = convertLinksToAbsoluteUrl(a.MarkdownDescription)
 	s.Title = a.Title
 	s.Enum = a.Enum
diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml
index bddcec5784..a3b69140f6 100644
--- a/bundle/internal/schema/annotations_openapi_overrides.yml
+++ b/bundle/internal/schema/annotations_openapi_overrides.yml
@@ -353,9 +353,6 @@ github.com/databricks/cli/bundle/config/resources.Pipeline:
       "run_as":
         "description": |-
           PLACEHOLDER
-      "target":
-        "force_not_deprecated": |-
-          true
       "trigger":
         "deprecation_message": |-
           Use continuous instead
diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json
index c0aec94eb8..f3072ef23d 100644
--- a/bundle/schema/jsonschema.json
+++ b/bundle/schema/jsonschema.json
@@ -1273,7 +1273,9 @@
         },
         "target": {
           "description": "Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.",
-          "$ref": "#/$defs/string"
+          "$ref": "#/$defs/string",
+          "deprecationMessage": "This field is deprecated",
+          "deprecated": true
         },
         "trigger": {
           "description": "Which pipeline trigger to use. Deprecated: Use `continuous` instead.",
diff --git a/experimental/python/codegen/codegen/generated_dataclass.py b/experimental/python/codegen/codegen/generated_dataclass.py
index e1888aadab..9070a1e38c 100644
--- a/experimental/python/codegen/codegen/generated_dataclass.py
+++ b/experimental/python/codegen/codegen/generated_dataclass.py
@@ -102,6 +102,11 @@ class GeneratedField:
     be marked as experimental in docstring.
     """
 
+    deprecated: bool
+    """
+    If true, the field is deprecated and should be marked as deprecated in docstring.
+ """ + def __post_init__(self): if self.default_factory is not None and self.default is not None: raise ValueError("Can't have both default and default_factory", self) @@ -131,6 +136,7 @@ class GeneratedDataclass: fields: list[GeneratedField] extends: list[GeneratedType] experimental: bool + deprecated: bool def generate_field( @@ -156,6 +162,7 @@ def generate_field( default_factory="dict", create_func_default="None", experimental=prop.stage == Stage.PRIVATE, + deprecated=prop.deprecated or False, ) elif field_type.name == "VariableOrList": return GeneratedField( @@ -168,6 +175,7 @@ def generate_field( default_factory="list", create_func_default="None", experimental=prop.stage == Stage.PRIVATE, + deprecated=prop.deprecated or False, ) elif is_required: return GeneratedField( @@ -180,6 +188,7 @@ def generate_field( default_factory=None, create_func_default=None, experimental=prop.stage == Stage.PRIVATE, + deprecated=prop.deprecated or False, ) else: return GeneratedField( @@ -192,6 +201,7 @@ def generate_field( default_factory=None, create_func_default="None", experimental=prop.stage == Stage.PRIVATE, + deprecated=prop.deprecated or False, ) @@ -326,6 +336,7 @@ def generate_dataclass( fields=fields, extends=extends, experimental=schema.stage == Stage.PRIVATE, + deprecated=schema.deprecated or False, ) @@ -365,10 +376,19 @@ def _append_dataclass(b: CodeBuilder, generated: GeneratedDataclass): b.append(":").newline() # FIXME should contain class docstring - if not generated.description and not generated.experimental: + if ( + not generated.description + and not generated.experimental + and not generated.deprecated + ): b.indent().append_triple_quote().append_triple_quote().newline().newline() else: - _append_description(b, generated.description, generated.experimental) + _append_description( + b, + generated.description, + experimental=generated.experimental, + deprecated=generated.deprecated, + ) def _append_field(b: CodeBuilder, field: GeneratedField): @@ -446,7 +466,12 @@ def _append_typed_dict(b: CodeBuilder, generated: GeneratedDataclass): b.indent().append_triple_quote().append_triple_quote().newline().newline() -def _append_description(b: CodeBuilder, description: Optional[str], experimental: bool): +def _append_description( + b: CodeBuilder, description: Optional[str], *, experimental: bool, deprecated: bool +): + if deprecated: + description = "[DEPRECATED] " + (description or "") + if description or experimental: b.indent().append_triple_quote().newline() if experimental: @@ -472,7 +497,12 @@ def get_code(generated: GeneratedDataclass) -> str: for field in generated.fields: _append_field(b, field) - _append_description(b, field.description, field.experimental) + _append_description( + b, + field.description, + experimental=field.experimental, + deprecated=field.deprecated, + ) b.newline() @@ -485,7 +515,12 @@ def get_code(generated: GeneratedDataclass) -> str: for field in generated.fields: _append_typed_dict_field(b, field) - _append_description(b, field.description, field.experimental) + _append_description( + b, + field.description, + experimental=field.experimental, + deprecated=field.deprecated, + ) b.newline() diff --git a/experimental/python/codegen/codegen/generated_enum.py b/experimental/python/codegen/codegen/generated_enum.py index 7f413d5b38..f2eecf82e4 100644 --- a/experimental/python/codegen/codegen/generated_enum.py +++ b/experimental/python/codegen/codegen/generated_enum.py @@ -15,6 +15,7 @@ class GeneratedEnum: values: dict[str, str] description: Optional[str] 
     experimental: bool
+    deprecated: bool
 
 
 def generate_enum(namespace: str, schema_name: str, schema: Schema) -> GeneratedEnum:
@@ -35,6 +36,7 @@ def generate_enum(namespace: str, schema_name: str, schema: Schema) -> Generated
         values=values,
         description=schema.description,
         experimental=schema.stage == Stage.PRIVATE,
+        deprecated=schema.deprecated or False,
     )
 
 
@@ -48,7 +50,12 @@ def get_code(generated: GeneratedEnum) -> str:
     b.append(f"class {generated.class_name}(Enum):")
     b.newline()
 
-    _append_description(b, generated.description, generated.experimental)
+    _append_description(
+        b,
+        generated.description,
+        experimental=generated.experimental,
+        deprecated=generated.deprecated,
+    )
 
     # Example:
     #
diff --git a/experimental/python/codegen/codegen/jsonschema_patch.py b/experimental/python/codegen/codegen/jsonschema_patch.py
index 848df05069..8cfe2981c3 100644
--- a/experimental/python/codegen/codegen/jsonschema_patch.py
+++ b/experimental/python/codegen/codegen/jsonschema_patch.py
@@ -7,6 +7,15 @@
         # doesn't work, openapi schema needs to be updated to be enum
         "kind",
     },
+    # fields that were deprecated a long time ago
+    "resources.Pipeline": {
+        # 'trigger' is deprecated, use 'continuous' or schedule pipeline refresh using job instead
+        "trigger",
+    },
+    "pipelines.PipelineLibrary": {
+        # 'whl' is deprecated, install libraries through notebooks and %pip command
+        "whl",
+    },
 }
 
 EXTRA_REQUIRED_FIELDS: dict[str, list[str]] = {
diff --git a/experimental/python/codegen/codegen/main.py b/experimental/python/codegen/codegen/main.py
index 18a11cc627..df26810338 100644
--- a/experimental/python/codegen/codegen/main.py
+++ b/experimental/python/codegen/codegen/main.py
@@ -77,13 +77,17 @@ def _transitively_mark_deprecated_and_private(
 def _remove_deprecated_fields(
     schemas: dict[str, openapi.Schema],
 ) -> dict[str, openapi.Schema]:
+    """
+    Remove fields that were deprecated during Private Preview.
+ """ + new_schemas = {} for name, schema in schemas.items(): if schema.type == openapi.SchemaType.OBJECT: new_properties = {} for field_name, field in schema.properties.items(): - if field.deprecated: + if field.deprecated and field.stage == openapi.Stage.PRIVATE: continue new_properties[field_name] = field diff --git a/experimental/python/codegen/codegen_tests/test_generated_dataclass.py b/experimental/python/codegen/codegen_tests/test_generated_dataclass.py index c41fdfd6d6..22de11b074 100644 --- a/experimental/python/codegen/codegen_tests/test_generated_dataclass.py +++ b/experimental/python/codegen/codegen_tests/test_generated_dataclass.py @@ -69,9 +69,11 @@ def test_generate_dataclass(): param_type_name=variable_or_type(str_type(), is_required=True), type_name=variable_or_type(str_type(), is_required=True), experimental=False, + deprecated=False, ), ], experimental=False, + deprecated=False, ) diff --git a/experimental/python/codegen/codegen_tests/test_generated_enum.py b/experimental/python/codegen/codegen_tests/test_generated_enum.py index f3f122b1a1..300b021e6a 100644 --- a/experimental/python/codegen/codegen_tests/test_generated_enum.py +++ b/experimental/python/codegen/codegen_tests/test_generated_enum.py @@ -19,4 +19,5 @@ def test_generate_enum(): values={"MY_ENUM_VALUE": "myEnumValue"}, description="enum description", experimental=False, + deprecated=False, ) diff --git a/experimental/python/databricks/bundles/jobs/_models/environment.py b/experimental/python/databricks/bundles/jobs/_models/environment.py index c8bdee0917..f7606430ec 100644 --- a/experimental/python/databricks/bundles/jobs/_models/environment.py +++ b/experimental/python/databricks/bundles/jobs/_models/environment.py @@ -16,6 +16,11 @@ class Environment: In this minimal environment spec, only pip dependencies are supported. """ + client: VariableOrOptional[str] = None + """ + [DEPRECATED] Use `environment_version` instead. + """ + dependencies: VariableOrList[str] = field(default_factory=list) """ List of pip dependencies, as supported by the version of pip in this environment. @@ -46,6 +51,11 @@ def as_dict(self) -> "EnvironmentDict": class EnvironmentDict(TypedDict, total=False): """""" + client: VariableOrOptional[str] + """ + [DEPRECATED] Use `environment_version` instead. + """ + dependencies: VariableOrList[str] """ List of pip dependencies, as supported by the version of pip in this environment. diff --git a/experimental/python/databricks/bundles/jobs/_models/gcp_attributes.py b/experimental/python/databricks/bundles/jobs/_models/gcp_attributes.py index cbc2a29630..6f390c1cf8 100644 --- a/experimental/python/databricks/bundles/jobs/_models/gcp_attributes.py +++ b/experimental/python/databricks/bundles/jobs/_models/gcp_attributes.py @@ -53,6 +53,13 @@ class GcpAttributes: for the supported number of local SSDs for each instance type. """ + use_preemptible_executors: VariableOrOptional[bool] = None + """ + [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible + VMs (when set to true) versus standard compute engine VMs (when set to false; default). + Note: Soon to be deprecated, use the 'availability' field instead. + """ + zone_id: VariableOrOptional[str] = None """ Identifier for the availability zone in which the cluster resides. @@ -108,6 +115,13 @@ class GcpAttributesDict(TypedDict, total=False): for the supported number of local SSDs for each instance type. 
""" + use_preemptible_executors: VariableOrOptional[bool] + """ + [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible + VMs (when set to true) versus standard compute engine VMs (when set to false; default). + Note: Soon to be deprecated, use the 'availability' field instead. + """ + zone_id: VariableOrOptional[str] """ Identifier for the availability zone in which the cluster resides. diff --git a/experimental/python/databricks/bundles/jobs/_models/init_script_info.py b/experimental/python/databricks/bundles/jobs/_models/init_script_info.py index 124741eaf7..f14f185487 100644 --- a/experimental/python/databricks/bundles/jobs/_models/init_script_info.py +++ b/experimental/python/databricks/bundles/jobs/_models/init_script_info.py @@ -8,6 +8,10 @@ Adlsgen2Info, Adlsgen2InfoParam, ) +from databricks.bundles.jobs._models.dbfs_storage_info import ( + DbfsStorageInfo, + DbfsStorageInfoParam, +) from databricks.bundles.jobs._models.gcs_storage_info import ( GcsStorageInfo, GcsStorageInfoParam, @@ -45,6 +49,12 @@ class InitScriptInfo: Contains the Azure Data Lake Storage destination path """ + dbfs: VariableOrOptional[DbfsStorageInfo] = None + """ + [DEPRECATED] destination needs to be provided. e.g. + `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }` + """ + file: VariableOrOptional[LocalFileInfo] = None """ destination needs to be provided, e.g. @@ -93,6 +103,12 @@ class InitScriptInfoDict(TypedDict, total=False): Contains the Azure Data Lake Storage destination path """ + dbfs: VariableOrOptional[DbfsStorageInfoParam] + """ + [DEPRECATED] destination needs to be provided. e.g. + `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }` + """ + file: VariableOrOptional[LocalFileInfoParam] """ destination needs to be provided, e.g. diff --git a/experimental/python/databricks/bundles/jobs/_models/job_email_notifications.py b/experimental/python/databricks/bundles/jobs/_models/job_email_notifications.py index 4cca930023..b97648f4dc 100644 --- a/experimental/python/databricks/bundles/jobs/_models/job_email_notifications.py +++ b/experimental/python/databricks/bundles/jobs/_models/job_email_notifications.py @@ -3,7 +3,7 @@ from databricks.bundles.core._transform import _transform from databricks.bundles.core._transform_to_json import _transform_to_json_value -from databricks.bundles.core._variable import VariableOrList +from databricks.bundles.core._variable import VariableOrList, VariableOrOptional if TYPE_CHECKING: from typing_extensions import Self @@ -13,6 +13,12 @@ class JobEmailNotifications: """""" + no_alert_for_skipped_runs: VariableOrOptional[bool] = None + """ + [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped. + This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field. + """ + on_duration_warning_threshold_exceeded: VariableOrList[str] = field( default_factory=list ) @@ -53,6 +59,12 @@ def as_dict(self) -> "JobEmailNotificationsDict": class JobEmailNotificationsDict(TypedDict, total=False): """""" + no_alert_for_skipped_runs: VariableOrOptional[bool] + """ + [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped. + This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field. 
+ """ + on_duration_warning_threshold_exceeded: VariableOrList[str] """ A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent. diff --git a/experimental/python/databricks/bundles/jobs/_models/library.py b/experimental/python/databricks/bundles/jobs/_models/library.py index bde8cbaad9..c8d4b5f5b5 100644 --- a/experimental/python/databricks/bundles/jobs/_models/library.py +++ b/experimental/python/databricks/bundles/jobs/_models/library.py @@ -30,6 +30,11 @@ class Library: Specification of a CRAN library to be installed as part of the library """ + egg: VariableOrOptional[str] = None + """ + [DEPRECATED] Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above. + """ + jar: VariableOrOptional[str] = None """ URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. @@ -82,6 +87,11 @@ class LibraryDict(TypedDict, total=False): Specification of a CRAN library to be installed as part of the library """ + egg: VariableOrOptional[str] + """ + [DEPRECATED] Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above. + """ + jar: VariableOrOptional[str] """ URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. diff --git a/experimental/python/databricks/bundles/jobs/_models/spark_jar_task.py b/experimental/python/databricks/bundles/jobs/_models/spark_jar_task.py index c4ebcaaf75..40bbe92ba0 100644 --- a/experimental/python/databricks/bundles/jobs/_models/spark_jar_task.py +++ b/experimental/python/databricks/bundles/jobs/_models/spark_jar_task.py @@ -3,7 +3,11 @@ from databricks.bundles.core._transform import _transform from databricks.bundles.core._transform_to_json import _transform_to_json_value -from databricks.bundles.core._variable import VariableOr, VariableOrList +from databricks.bundles.core._variable import ( + VariableOr, + VariableOrList, + VariableOrOptional, +) if TYPE_CHECKING: from typing_extensions import Self @@ -20,6 +24,11 @@ class SparkJarTask: The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail. """ + jar_uri: VariableOrOptional[str] = None + """ + [DEPRECATED] Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create. + """ + parameters: VariableOrList[str] = field(default_factory=list) """ Parameters passed to the main method. @@ -27,6 +36,11 @@ class SparkJarTask: Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs. """ + run_as_repl: VariableOrOptional[bool] = None + """ + [DEPRECATED] Deprecated. A value of `false` is no longer supported. + """ + @classmethod def from_dict(cls, value: "SparkJarTaskDict") -> "Self": return _transform(cls, value) @@ -45,6 +59,11 @@ class SparkJarTaskDict(TypedDict, total=False): The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail. """ + jar_uri: VariableOrOptional[str] + """ + [DEPRECATED] Deprecated since 04/2016. 
Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create. + """ + parameters: VariableOrList[str] """ Parameters passed to the main method. @@ -52,5 +71,10 @@ class SparkJarTaskDict(TypedDict, total=False): Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs. """ + run_as_repl: VariableOrOptional[bool] + """ + [DEPRECATED] Deprecated. A value of `false` is no longer supported. + """ + SparkJarTaskParam = SparkJarTaskDict | SparkJarTask diff --git a/experimental/python/databricks/bundles/jobs/_models/task_email_notifications.py b/experimental/python/databricks/bundles/jobs/_models/task_email_notifications.py index a0ead50c37..3583798515 100644 --- a/experimental/python/databricks/bundles/jobs/_models/task_email_notifications.py +++ b/experimental/python/databricks/bundles/jobs/_models/task_email_notifications.py @@ -3,7 +3,7 @@ from databricks.bundles.core._transform import _transform from databricks.bundles.core._transform_to_json import _transform_to_json_value -from databricks.bundles.core._variable import VariableOrList +from databricks.bundles.core._variable import VariableOrList, VariableOrOptional if TYPE_CHECKING: from typing_extensions import Self @@ -13,6 +13,12 @@ class TaskEmailNotifications: """""" + no_alert_for_skipped_runs: VariableOrOptional[bool] = None + """ + [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped. + This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field. + """ + on_duration_warning_threshold_exceeded: VariableOrList[str] = field( default_factory=list ) @@ -53,6 +59,12 @@ def as_dict(self) -> "TaskEmailNotificationsDict": class TaskEmailNotificationsDict(TypedDict, total=False): """""" + no_alert_for_skipped_runs: VariableOrOptional[bool] + """ + [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped. + This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field. + """ + on_duration_warning_threshold_exceeded: VariableOrList[str] """ A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent. diff --git a/experimental/python/databricks/bundles/pipelines/_models/gcp_attributes.py b/experimental/python/databricks/bundles/pipelines/_models/gcp_attributes.py index 53b2e9c76d..1deae0c124 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/gcp_attributes.py +++ b/experimental/python/databricks/bundles/pipelines/_models/gcp_attributes.py @@ -53,6 +53,13 @@ class GcpAttributes: for the supported number of local SSDs for each instance type. """ + use_preemptible_executors: VariableOrOptional[bool] = None + """ + [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible + VMs (when set to true) versus standard compute engine VMs (when set to false; default). + Note: Soon to be deprecated, use the 'availability' field instead. + """ + zone_id: VariableOrOptional[str] = None """ Identifier for the availability zone in which the cluster resides. @@ -108,6 +115,13 @@ class GcpAttributesDict(TypedDict, total=False): for the supported number of local SSDs for each instance type. 
""" + use_preemptible_executors: VariableOrOptional[bool] + """ + [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible + VMs (when set to true) versus standard compute engine VMs (when set to false; default). + Note: Soon to be deprecated, use the 'availability' field instead. + """ + zone_id: VariableOrOptional[str] """ Identifier for the availability zone in which the cluster resides. diff --git a/experimental/python/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py b/experimental/python/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py index 9793c02fe7..fd278e5e6d 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +++ b/experimental/python/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py @@ -30,6 +30,11 @@ class IngestionGatewayPipelineDefinition: Required, Immutable. The name of the schema for the gateway pipelines's storage location. """ + connection_id: VariableOrOptional[str] = None + """ + [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source. + """ + gateway_storage_name: VariableOrOptional[str] = None """ Optional. The Unity Catalog-compatible name for the gateway storage location. @@ -63,6 +68,11 @@ class IngestionGatewayPipelineDefinitionDict(TypedDict, total=False): Required, Immutable. The name of the schema for the gateway pipelines's storage location. """ + connection_id: VariableOrOptional[str] + """ + [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source. + """ + gateway_storage_name: VariableOrOptional[str] """ Optional. The Unity Catalog-compatible name for the gateway storage location. diff --git a/experimental/python/databricks/bundles/pipelines/_models/init_script_info.py b/experimental/python/databricks/bundles/pipelines/_models/init_script_info.py index 91bc383e42..6a6297a30b 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/init_script_info.py +++ b/experimental/python/databricks/bundles/pipelines/_models/init_script_info.py @@ -8,6 +8,10 @@ Adlsgen2Info, Adlsgen2InfoParam, ) +from databricks.bundles.pipelines._models.dbfs_storage_info import ( + DbfsStorageInfo, + DbfsStorageInfoParam, +) from databricks.bundles.pipelines._models.gcs_storage_info import ( GcsStorageInfo, GcsStorageInfoParam, @@ -45,6 +49,12 @@ class InitScriptInfo: Contains the Azure Data Lake Storage destination path """ + dbfs: VariableOrOptional[DbfsStorageInfo] = None + """ + [DEPRECATED] destination needs to be provided. e.g. + `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }` + """ + file: VariableOrOptional[LocalFileInfo] = None """ destination needs to be provided, e.g. @@ -93,6 +103,12 @@ class InitScriptInfoDict(TypedDict, total=False): Contains the Azure Data Lake Storage destination path """ + dbfs: VariableOrOptional[DbfsStorageInfoParam] + """ + [DEPRECATED] destination needs to be provided. e.g. + `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }` + """ + file: VariableOrOptional[LocalFileInfoParam] """ destination needs to be provided, e.g. 
diff --git a/experimental/python/databricks/bundles/pipelines/_models/pipeline.py b/experimental/python/databricks/bundles/pipelines/_models/pipeline.py
index 6ca71f4dae..817689f338 100644
--- a/experimental/python/databricks/bundles/pipelines/_models/pipeline.py
+++ b/experimental/python/databricks/bundles/pipelines/_models/pipeline.py
@@ -203,7 +203,7 @@ class Pipeline(Resource):
 
     target: VariableOrOptional[str] = None
     """
-    Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
+    [DEPRECATED] Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
     """
 
     @classmethod
@@ -361,7 +361,7 @@ class PipelineDict(TypedDict, total=False):
 
     target: VariableOrOptional[str]
     """
-    Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
+    [DEPRECATED] Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
     """
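
Taken together, the diff changes the deprecation policy in two places: publicly deprecated fields now stay in the generated Python models, with a "[DEPRECATED] " prefix added to their docstrings, while only fields that were deprecated during Private Preview are dropped from codegen entirely. A minimal, self-contained sketch of that filtering rule follows; Stage and Field here are simplified stand-ins for the real codegen.openapi types, and hidden_flag is a hypothetical field name used only for illustration.

from dataclasses import dataclass
from enum import Enum


class Stage(Enum):
    PUBLIC = "PUBLIC"
    PRIVATE = "PRIVATE"


@dataclass
class Field:
    name: str
    deprecated: bool
    stage: Stage


def keep_field(field: Field) -> bool:
    # Old rule (`if field.deprecated: continue`) dropped every deprecated field.
    # New rule: drop a field only if it was deprecated while still in Private
    # Preview; publicly deprecated fields are kept and annotated instead.
    return not (field.deprecated and field.stage == Stage.PRIVATE)


fields = [
    Field("egg", deprecated=True, stage=Stage.PUBLIC),  # kept, docstring gains "[DEPRECATED] "
    Field("hidden_flag", deprecated=True, stage=Stage.PRIVATE),  # dropped from codegen (hypothetical)
    Field("jar", deprecated=False, stage=Stage.PUBLIC),  # kept as-is
]
print([f.name for f in fields if keep_field(f)])  # prints ['egg', 'jar']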