2 changes: 0 additions & 2 deletions .codegen.json
@@ -21,8 +21,6 @@
"go"
],
"post_generate": [
"[ ! -f tagging.py ] || mv tagging.py internal/genkit/tagging.py",
"rm .github/workflows/next-changelog.yml",
"go test -timeout 240s -run TestConsistentDatabricksSdkVersion github.com/databricks/cli/internal/build",
"make schema",
"echo 'bundle/internal/tf/schema/\\*.go linguist-generated=true' >> ./.gitattributes",
@@ -12,7 +12,8 @@
@dataclass(kw_only=True)
class Environment:
"""
The environment entity used to preserve serverless environment side panel and jobs' environment for non-notebook task.
The environment entity used to preserve the serverless environment side panel, jobs' environment for non-notebook tasks, and DLT's environment for classic and serverless pipelines.
(Note: DLT uses a copied version of the Environment proto below, at //spark/pipelines/api/protos/copied/libraries-environments-copy.proto)
In this minimal environment spec, only pip dependencies are supported.
"""

8 changes: 2 additions & 6 deletions experimental/python/databricks/bundles/jobs/_models/job.py
@@ -143,9 +143,7 @@ class Job(Resource):

performance_target: VariableOrOptional[PerformanceTarget] = None
"""
:meta private: [EXPERIMENTAL]

The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run.
The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run.

* `STANDARD`: Enables cost-efficient execution of serverless workloads.
* `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.
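Dropping the `:meta private:` marker makes `performance_target` part of the documented API. A short sketch of setting it, assuming `Job` and `PerformanceTarget` are importable from `databricks.bundles.jobs`:

```python
from databricks.bundles.jobs import Job, PerformanceTarget

# Favor fast startup and execution over cost for this serverless job.
job = Job(
    name="nightly-etl",
    performance_target=PerformanceTarget.PERFORMANCE_OPTIMIZED,
)

# The TypedDict form below (JobDict) takes the enum value as a plain string:
job_param = {"name": "nightly-etl", "performance_target": "PERFORMANCE_OPTIMIZED"}
```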
@@ -276,9 +274,7 @@ class JobDict(TypedDict, total=False):

performance_target: VariableOrOptional[PerformanceTargetParam]
"""
:meta private: [EXPERIMENTAL]

The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run.
The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run.

* `STANDARD`: Enables cost-efficient execution of serverless workloads.
* `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.
@@ -4,8 +4,6 @@

class PerformanceTarget(Enum):
"""
:meta private: [EXPERIMENTAL]

PerformanceTarget defines how performant (lower latency) or cost-efficient the execution of a run on serverless compute should be.
The performance mode on the job or pipeline should map to a performance setting that is passed to Cluster Manager
(see cluster-common PerformanceTarget).
@@ -15,7 +15,7 @@ class FileLibrary:

path: VariableOrOptional[str] = None
"""
The absolute path of the file.
The absolute path of the source code.
"""

@classmethod
@@ -31,7 +31,7 @@ class FileLibraryDict(TypedDict, total=False):

path: VariableOrOptional[str]
"""
The absolute path of the file.
The absolute path of the source code.
"""


@@ -3,7 +3,7 @@

from databricks.bundles.core._transform import _transform
from databricks.bundles.core._transform_to_json import _transform_to_json_value
from databricks.bundles.core._variable import VariableOrOptional
from databricks.bundles.core._variable import VariableOr, VariableOrOptional

if TYPE_CHECKING:
from typing_extensions import Self
@@ -15,27 +15,26 @@ class IngestionGatewayPipelineDefinition:
:meta private: [EXPERIMENTAL]
"""

connection_name: VariableOrOptional[str] = None
connection_name: VariableOr[str]
"""
Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
"""

gateway_storage_catalog: VariableOrOptional[str] = None
gateway_storage_catalog: VariableOr[str]
"""
Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
"""

gateway_storage_schema: VariableOr[str]
"""
Required, Immutable. The name of the schema for the gateway pipeline's storage location.
"""

gateway_storage_name: VariableOrOptional[str] = None
"""
Optional. The Unity Catalog-compatible name for the gateway storage location.
This is the destination to use for the data that is extracted by the gateway.
The Delta Live Tables system will automatically create the storage location under the catalog and schema.

"""

gateway_storage_schema: VariableOrOptional[str] = None
"""
Required, Immutable. The name of the schema for the gateway pipeline's storage location.
"""

@classmethod
@@ -49,27 +48,26 @@ def as_dict(self) -> "IngestionGatewayPipelineDefinitionDict":
class IngestionGatewayPipelineDefinitionDict(TypedDict, total=False):
""""""

connection_name: VariableOrOptional[str]
connection_name: VariableOr[str]
"""
Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
"""

gateway_storage_catalog: VariableOrOptional[str]
gateway_storage_catalog: VariableOr[str]
"""
Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
"""

gateway_storage_schema: VariableOr[str]
"""
Required, Immutable. The name of the schema for the gateway pipeline's storage location.
"""

gateway_storage_name: VariableOrOptional[str]
"""
Optional. The Unity Catalog-compatible name for the gateway storage location.
This is the destination to use for the data that is extracted by the gateway.
The Delta Live Tables system will automatically create the storage location under the catalog and schema.

"""

gateway_storage_schema: VariableOrOptional[str]
"""
Required, Immutable. The name of the schema for the gateway pipeline's storage location.
"""


@@ -15,7 +15,7 @@ class NotebookLibrary:

path: VariableOrOptional[str] = None
"""
The absolute path of the notebook.
The absolute path of the source code.
"""

@classmethod
@@ -31,7 +31,7 @@ class NotebookLibraryDict(TypedDict, total=False):

path: VariableOrOptional[str]
"""
The absolute path of the notebook.
The absolute path of the source code.
"""


@@ -22,13 +22,11 @@ class Notifications:
* `on-update-failure`: Each time a pipeline update fails.
* `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error.
* `on-flow-failure`: A single data flow fails.

"""

email_recipients: VariableOrList[str] = field(default_factory=list)
"""
A list of email addresses notified when a configured alert is triggered.

"""

@classmethod
@@ -51,13 +49,11 @@ class NotificationsDict(TypedDict, total=False):
* `on-update-failure`: Each time a pipeline update fails.
* `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error.
* `on-flow-failure`: A single data flow fails.

"""

email_recipients: VariableOrList[str]
"""
A list of email addresses notified when a configured alert is triggered.

"""


@@ -71,7 +71,6 @@ class PipelineCluster:
for one cluster. If the conf is given, the logs will be delivered to the destination every
`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while
the destination of executor logs is `$destination/$clusterId/executor`.

"""

custom_tags: VariableOrDict[str] = field(default_factory=dict)
@@ -130,7 +129,6 @@ class PipelineCluster:
the Spark nodes in this cluster. For example, the Spark nodes can be provisioned
and optimized for memory or compute intensive workloads. A list of available node
types can be retrieved by using the :method:clusters/listNodeTypes API call.

"""

num_workers: VariableOrOptional[int] = None
@@ -154,7 +152,6 @@ class PipelineCluster:
"""
An object containing a set of optional, user-specified Spark configuration key-value pairs.
See :method:clusters/create for more details.

"""

spark_env_vars: VariableOrDict[str] = field(default_factory=dict)
@@ -220,7 +217,6 @@ class PipelineClusterDict(TypedDict, total=False):
for one cluster. If the conf is given, the logs will be delivered to the destination every
`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while
the destination of executor logs is `$destination/$clusterId/executor`.

"""

custom_tags: VariableOrDict[str]
@@ -279,7 +275,6 @@ class PipelineClusterDict(TypedDict, total=False):
the Spark nodes in this cluster. For example, the Spark nodes can be provisioned
and optimized for memory or compute intensive workloads. A list of available node
types can be retrieved by using the :method:clusters/listNodeTypes API call.

"""

num_workers: VariableOrOptional[int]
@@ -303,7 +298,6 @@ class PipelineClusterDict(TypedDict, total=False):
"""
An object containing a set of optional, user-specified Spark configuration key-value pairs.
See :method:clusters/create for more details.

"""

spark_env_vars: VariableOrDict[str]
@@ -35,7 +35,6 @@ class PipelineClusterAutoscale:
the data processing latency of your pipelines. Enhanced Autoscaling is available
for `updates` clusters only. The legacy autoscaling feature is used for `maintenance`
clusters.

"""

@classmethod
@@ -67,7 +66,6 @@ class PipelineClusterAutoscaleDict(TypedDict, total=False):
the data processing latency of your pipelines. Enhanced Autoscaling is available
for `updates` clusters only. The legacy autoscaling feature is used for `maintenance`
clusters.

"""


@@ -9,7 +9,6 @@ class PipelineClusterAutoscaleMode(Enum):
the data processing latency of your pipelines. Enhanced Autoscaling is available
for `updates` clusters only. The legacy autoscaling feature is used for `maintenance`
clusters.

"""

ENHANCED = "ENHANCED"
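The three cluster sections fit together roughly as follows. A sketch assuming the classes are re-exported from `databricks.bundles.pipelines`, that `PipelineCluster` carries `label` and `autoscale` fields, and that the autoscale model has `min_workers`/`max_workers` alongside the `mode` shown above; none of those fields appear in this diff:

```python
from databricks.bundles.pipelines import (
    PipelineCluster,
    PipelineClusterAutoscale,
    PipelineClusterAutoscaleMode,
)

cluster = PipelineCluster(
    label="default",
    node_type_id="i3.xlarge",  # discoverable via :method:clusters/listNodeTypes
    autoscale=PipelineClusterAutoscale(
        min_workers=1,
        max_workers=5,
        # Enhanced Autoscaling applies to `updates` clusters only.
        mode=PipelineClusterAutoscaleMode.ENHANCED,
    ),
    spark_conf={"spark.databricks.io.cache.enabled": "true"},
)
```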
@@ -28,29 +28,25 @@ class PipelineLibrary:
file: VariableOrOptional[FileLibrary] = None
"""
The path to a file that defines a pipeline and is stored in Databricks Repos.

"""

jar: VariableOrOptional[str] = None
"""
:meta private: [EXPERIMENTAL]

URI of the jar to be installed. Currently only DBFS is supported.

"""

maven: VariableOrOptional[MavenLibrary] = None
"""
:meta private: [EXPERIMENTAL]

Specification of a Maven library to be installed.

"""

notebook: VariableOrOptional[NotebookLibrary] = None
"""
The path to a notebook that defines a pipeline and is stored in the Databricks workspace.

"""

@classmethod
@@ -67,29 +63,25 @@ class PipelineLibraryDict(TypedDict, total=False):
file: VariableOrOptional[FileLibraryParam]
"""
The path to a file that defines a pipeline and is stored in Databricks Repos.

"""

jar: VariableOrOptional[str]
"""
:meta private: [EXPERIMENTAL]

URI of the jar to be installed. Currently only DBFS is supported.

"""

maven: VariableOrOptional[MavenLibraryParam]
"""
:meta private: [EXPERIMENTAL]

Specification of a Maven library to be installed.

"""

notebook: VariableOrOptional[NotebookLibraryParam]
"""
The path to a notebook that defines a pipeline and is stored in the Databricks workspace.

"""


@@ -3,7 +3,7 @@

from databricks.bundles.core._transform import _transform
from databricks.bundles.core._transform_to_json import _transform_to_json_value
from databricks.bundles.core._variable import VariableOrOptional
from databricks.bundles.core._variable import VariableOr, VariableOrOptional
from databricks.bundles.pipelines._models.table_specific_config import (
TableSpecificConfig,
TableSpecificConfigParam,
@@ -17,24 +17,24 @@
class ReportSpec:
""""""

destination_catalog: VariableOrOptional[str] = None
destination_catalog: VariableOr[str]
"""
Required. Destination catalog to store table.
"""

destination_schema: VariableOrOptional[str] = None
destination_schema: VariableOr[str]
"""
Required. Destination schema to store table.
"""

destination_table: VariableOrOptional[str] = None
source_url: VariableOr[str]
"""
Required. Destination table name. The pipeline fails if a table with that name already exists.
Required. Report URL in the source system.
"""

source_url: VariableOrOptional[str] = None
destination_table: VariableOrOptional[str] = None
"""
Required. Report URL in the source system.
Required. Destination table name. The pipeline fails if a table with that name already exists.
"""

table_configuration: VariableOrOptional[TableSpecificConfig] = None
@@ -53,24 +53,24 @@ def as_dict(self) -> "ReportSpecDict":
class ReportSpecDict(TypedDict, total=False):
""""""

destination_catalog: VariableOrOptional[str]
destination_catalog: VariableOr[str]
"""
Required. Destination catalog to store table.
"""

destination_schema: VariableOrOptional[str]
destination_schema: VariableOr[str]
"""
Required. Destination schema to store table.
"""

destination_table: VariableOrOptional[str]
source_url: VariableOr[str]
"""
Required. Destination table name. The pipeline fails if a table with that name already exists.
Required. Report URL in the source system.
"""

source_url: VariableOrOptional[str]
destination_table: VariableOrOptional[str]
"""
Required. Report URL in the source system.
Required. Destination table name. The pipeline fails if a table with that name already exists.
"""

table_configuration: VariableOrOptional[TableSpecificConfigParam]
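As with the gateway definition above, the `VariableOr[str]` fields are now required while `destination_table` keeps its `None` default. A sketch, assuming `ReportSpec` is re-exported from `databricks.bundles.pipelines`; the URL and names are placeholders:

```python
from databricks.bundles.pipelines import ReportSpec

report = ReportSpec(
    destination_catalog="main",
    destination_schema="salesforce_reports",
    source_url="https://example.my.salesforce.com/report/00O0000000aaaaa",
    # destination_table stays optional; if set, the pipeline fails when a
    # table with that name already exists.
)
```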