
Commit a0dd6bc

Regenerated Python code after Go SDK upgrade (#2817)
## Changes
Regenerated the Python code after the Go SDK upgrade.

## Why
This regeneration should have happened as part of the Go SDK upgrade but was missed; it has also been added to the generate step so it runs automatically going forward.
1 parent 92c60e3 commit a0dd6bc

16 files changed: +98 −94 lines

.codegen.json

Lines changed: 0 additions & 2 deletions
@@ -21,8 +21,6 @@
     "go"
   ],
   "post_generate": [
-    "[ ! -f tagging.py ] || mv tagging.py internal/genkit/tagging.py",
-    "rm .github/workflows/next-changelog.yml",
     "go test -timeout 240s -run TestConsistentDatabricksSdkVersion github.com/databricks/cli/internal/build",
     "make schema",
     "echo 'bundle/internal/tf/schema/\\*.go linguist-generated=true' >> ./.gitattributes",

experimental/python/databricks/bundles/compute/_models/environment.py

Lines changed: 2 additions & 1 deletion
@@ -12,7 +12,8 @@
 @dataclass(kw_only=True)
 class Environment:
     """
-    The environment entity used to preserve serverless environment side panel and jobs' environment for non-notebook task.
+    The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines.
+    (Note: DLT uses a copied version of the Environment proto below, at //spark/pipelines/api/protos/copied/libraries-environments-copy.proto)
     In this minimal environment spec, only pip dependencies are supported.
     """
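For context, a minimal sketch of how this model might be used, assuming a `dependencies` field for pip requirements; the field is not visible in this hunk, so both the field name and the values are illustrative:

```python
from databricks.bundles.compute._models.environment import Environment

# Hypothetical usage; the docstring notes that only pip dependencies
# are supported in this minimal environment spec.
env = Environment(
    dependencies=["pandas==2.2.0", "requests"],  # assumed field name
)
```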

experimental/python/databricks/bundles/jobs/_models/job.py

Lines changed: 2 additions & 6 deletions
@@ -143,9 +143,7 @@ class Job(Resource):
 
     performance_target: VariableOrOptional[PerformanceTarget] = None
     """
-    :meta private: [EXPERIMENTAL]
-
-    The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run.
+    The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run.
 
     * `STANDARD`: Enables cost-efficient execution of serverless workloads.
     * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.
@@ -276,9 +274,7 @@ class JobDict(TypedDict, total=False):
 
     performance_target: VariableOrOptional[PerformanceTargetParam]
     """
-    :meta private: [EXPERIMENTAL]
-
-    The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run.
+    The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run.
 
     * `STANDARD`: Enables cost-efficient execution of serverless workloads.
     * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.
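With the `[EXPERIMENTAL]` marker dropped, `performance_target` becomes part of the documented `Job` surface. A minimal sketch of setting it, using the private module paths shown in this diff (a public re-export likely exists but is not shown here) and assuming the enum member names match the documented values:

```python
from databricks.bundles.jobs._models.job import Job
from databricks.bundles.jobs._models.performance_target import PerformanceTarget

# PERFORMANCE_OPTIMIZED prioritizes fast startup and execution times;
# STANDARD favors cost-efficient execution (see the docstring above).
job = Job(
    performance_target=PerformanceTarget.PERFORMANCE_OPTIMIZED,
)
```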

experimental/python/databricks/bundles/jobs/_models/performance_target.py

Lines changed: 0 additions & 2 deletions
@@ -4,8 +4,6 @@
 
 class PerformanceTarget(Enum):
     """
-    :meta private: [EXPERIMENTAL]
-
     PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run on serverless compute should be.
     The performance mode on the job or pipeline should map to a performance setting that is passed to Cluster Manager
     (see cluster-common PerformanceTarget).

experimental/python/databricks/bundles/pipelines/_models/file_library.py

Lines changed: 2 additions & 2 deletions
@@ -15,7 +15,7 @@ class FileLibrary:
 
     path: VariableOrOptional[str] = None
     """
-    The absolute path of the file.
+    The absolute path of the source code.
     """
 
     @classmethod
@@ -31,7 +31,7 @@ class FileLibraryDict(TypedDict, total=False):
 
     path: VariableOrOptional[str]
    """
-    The absolute path of the file.
+    The absolute path of the source code.
     """

experimental/python/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py

Lines changed: 15 additions & 17 deletions
@@ -3,7 +3,7 @@
 
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
-from databricks.bundles.core._variable import VariableOrOptional
+from databricks.bundles.core._variable import VariableOr, VariableOrOptional
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -15,27 +15,26 @@ class IngestionGatewayPipelineDefinition:
     :meta private: [EXPERIMENTAL]
     """
 
-    connection_name: VariableOrOptional[str] = None
+    connection_name: VariableOr[str]
     """
     Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
     """
 
-    gateway_storage_catalog: VariableOrOptional[str] = None
+    gateway_storage_catalog: VariableOr[str]
     """
     Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
     """
 
+    gateway_storage_schema: VariableOr[str]
+    """
+    Required, Immutable. The name of the schema for the gateway pipelines's storage location.
+    """
+
     gateway_storage_name: VariableOrOptional[str] = None
     """
     Optional. The Unity Catalog-compatible name for the gateway storage location.
     This is the destination to use for the data that is extracted by the gateway.
     Delta Live Tables system will automatically create the storage location under the catalog and schema.
-
-    """
-
-    gateway_storage_schema: VariableOrOptional[str] = None
-    """
-    Required, Immutable. The name of the schema for the gateway pipelines's storage location.
     """
 
     @classmethod
@@ -49,27 +48,26 @@ def as_dict(self) -> "IngestionGatewayPipelineDefinitionDict":
 
 class IngestionGatewayPipelineDefinitionDict(TypedDict, total=False):
     """"""
 
-    connection_name: VariableOrOptional[str]
+    connection_name: VariableOr[str]
     """
     Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
     """
 
-    gateway_storage_catalog: VariableOrOptional[str]
+    gateway_storage_catalog: VariableOr[str]
     """
     Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
     """
 
+    gateway_storage_schema: VariableOr[str]
+    """
+    Required, Immutable. The name of the schema for the gateway pipelines's storage location.
+    """
+
     gateway_storage_name: VariableOrOptional[str]
     """
     Optional. The Unity Catalog-compatible name for the gateway storage location.
     This is the destination to use for the data that is extracted by the gateway.
     Delta Live Tables system will automatically create the storage location under the catalog and schema.
-
-    """
-
-    gateway_storage_schema: VariableOrOptional[str]
-    """
-    Required, Immutable. The name of the schema for the gateway pipelines's storage location.
     """
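The substantive change here: `connection_name`, `gateway_storage_catalog`, and `gateway_storage_schema` move from `VariableOrOptional[str]` (defaulting to `None`) to `VariableOr[str]`, so they become required constructor arguments, and `gateway_storage_schema` is reordered next to the other required fields. A minimal sketch under that reading (values are illustrative):

```python
from databricks.bundles.pipelines._models.ingestion_gateway_pipeline_definition import (
    IngestionGatewayPipelineDefinition,
)

# The three VariableOr[str] fields must now be provided;
# gateway_storage_name remains optional.
gateway = IngestionGatewayPipelineDefinition(
    connection_name="sql-server-connection",
    gateway_storage_catalog="main",
    gateway_storage_schema="ingestion_gateway",
)
print(gateway.as_dict())  # as_dict() appears in the hunk header above
```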

experimental/python/databricks/bundles/pipelines/_models/notebook_library.py

Lines changed: 2 additions & 2 deletions
@@ -15,7 +15,7 @@ class NotebookLibrary:
 
     path: VariableOrOptional[str] = None
     """
-    The absolute path of the notebook.
+    The absolute path of the source code.
     """
 
     @classmethod
@@ -31,7 +31,7 @@ class NotebookLibraryDict(TypedDict, total=False):
 
     path: VariableOrOptional[str]
     """
-    The absolute path of the notebook.
+    The absolute path of the source code.
     """
3737

experimental/python/databricks/bundles/pipelines/_models/notifications.py

Lines changed: 0 additions & 4 deletions
@@ -22,13 +22,11 @@ class Notifications:
     * `on-update-failure`: Each time a pipeline update fails.
     * `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error.
     * `on-flow-failure`: A single data flow fails.
-
     """
 
     email_recipients: VariableOrList[str] = field(default_factory=list)
     """
     A list of email addresses notified when a configured alert is triggered.
-
     """
 
     @classmethod
@@ -51,13 +49,11 @@ class NotificationsDict(TypedDict, total=False):
     * `on-update-failure`: Each time a pipeline update fails.
     * `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error.
     * `on-flow-failure`: A single data flow fails.
-
     """
 
     email_recipients: VariableOrList[str]
     """
     A list of email addresses notified when a configured alert is triggered.
-
     """
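A minimal sketch of configuring notifications, assuming the alert values listed in the docstring are carried in an `alerts` list field; that field is not visible in this hunk, so its name is an assumption, while `email_recipients` is shown above:

```python
from databricks.bundles.pipelines._models.notifications import Notifications

notifications = Notifications(
    alerts=["on-update-failure", "on-flow-failure"],  # assumed field name; values from the docstring
    email_recipients=["data-team@example.com"],
)
```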

experimental/python/databricks/bundles/pipelines/_models/pipeline_cluster.py

Lines changed: 0 additions & 6 deletions
@@ -71,7 +71,6 @@ class PipelineCluster:
     for one cluster. If the conf is given, the logs will be delivered to the destination every
     `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while
     the destination of executor logs is `$destination/$clusterId/executor`.
-
     """
 
     custom_tags: VariableOrDict[str] = field(default_factory=dict)
@@ -130,7 +129,6 @@ class PipelineCluster:
     the Spark nodes in this cluster. For example, the Spark nodes can be provisioned
     and optimized for memory or compute intensive workloads. A list of available node
     types can be retrieved by using the :method:clusters/listNodeTypes API call.
-
     """
 
     num_workers: VariableOrOptional[int] = None
@@ -154,7 +152,6 @@ class PipelineCluster:
     """
     An object containing a set of optional, user-specified Spark configuration key-value pairs.
     See :method:clusters/create for more details.
-
     """
 
     spark_env_vars: VariableOrDict[str] = field(default_factory=dict)
@@ -220,7 +217,6 @@ class PipelineClusterDict(TypedDict, total=False):
     for one cluster. If the conf is given, the logs will be delivered to the destination every
     `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while
     the destination of executor logs is `$destination/$clusterId/executor`.
-
     """
 
     custom_tags: VariableOrDict[str]
@@ -279,7 +275,6 @@ class PipelineClusterDict(TypedDict, total=False):
     the Spark nodes in this cluster. For example, the Spark nodes can be provisioned
     and optimized for memory or compute intensive workloads. A list of available node
     types can be retrieved by using the :method:clusters/listNodeTypes API call.
-
     """
 
     num_workers: VariableOrOptional[int]
@@ -303,7 +298,6 @@ class PipelineClusterDict(TypedDict, total=False):
     """
     An object containing a set of optional, user-specified Spark configuration key-value pairs.
     See :method:clusters/create for more details.
-
     """
 
     spark_env_vars: VariableOrDict[str]
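A minimal sketch using only the fields visible in these hunks (`num_workers`, `custom_tags`, `spark_env_vars`); the values are illustrative and all other fields keep their defaults:

```python
from databricks.bundles.pipelines._models.pipeline_cluster import PipelineCluster

cluster = PipelineCluster(
    num_workers=2,                     # VariableOrOptional[int]
    custom_tags={"team": "data-eng"},  # VariableOrDict[str]
    spark_env_vars={"PYSPARK_PYTHON": "/databricks/python3/bin/python3"},
)
```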

experimental/python/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py

Lines changed: 0 additions & 2 deletions
@@ -35,7 +35,6 @@ class PipelineClusterAutoscale:
     the data processing latency of your pipelines. Enhanced Autoscaling is available
     for `updates` clusters only. The legacy autoscaling feature is used for `maintenance`
     clusters.
-
     """
 
     @classmethod
@@ -67,7 +66,6 @@ class PipelineClusterAutoscaleDict(TypedDict, total=False):
     the data processing latency of your pipelines. Enhanced Autoscaling is available
     for `updates` clusters only. The legacy autoscaling feature is used for `maintenance`
     clusters.
-
     """
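A minimal sketch of enabling the Enhanced Autoscaling described above; `min_workers`, `max_workers`, and the string-valued `mode` are assumed field names and values, since none of them appear in this hunk:

```python
from databricks.bundles.pipelines._models.pipeline_cluster_autoscale import (
    PipelineClusterAutoscale,
)

# "ENHANCED" would select the Enhanced Autoscaling for `updates` clusters
# described in the docstring; all field names here are assumptions.
autoscale = PipelineClusterAutoscale(
    min_workers=1,
    max_workers=5,
    mode="ENHANCED",
)
```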
