
Commit c71839b

kanterov and pietern committed
[Python] Support pipelines (#2574)
## Changes

Add support for pipelines to `experimental/python`. Only the `name` property is supported; the generated code will be added as a follow-up.

## Why

It allows using pipelines in addition to jobs.

## Tests

Unit and acceptance tests

---------

Co-authored-by: Pieter Noordhuis <pieter.noordhuis@databricks.com>
1 parent 7fa0a8b commit c71839b
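
For context, here is a minimal sketch of the new Python surface, assembled from the acceptance test files added below. The `databricks-bundles` names used (`Resources.add_pipeline`, `pipeline_mutator`, `Pipeline`) come from this change; the specific resource and function names are illustrative only:

```python
from dataclasses import replace

from databricks.bundles.core import Resources, pipeline_mutator
from databricks.bundles.pipelines import Pipeline


def load_resources() -> Resources:
    # Register a pipeline programmatically; only the `name` property
    # is supported in this change.
    resources = Resources()
    resources.add_pipeline("my_pipeline", {"name": "My Pipeline"})
    return resources


@pipeline_mutator
def rename_pipeline(pipeline: Pipeline) -> Pipeline:
    # A pipeline mutator receives each pipeline and returns a modified copy.
    return replace(pipeline, name=f"{pipeline.name} (updated)")
```

These functions are wired up through `experimental.python.resources` and `experimental.python.mutators` in `databricks.yml`, as shown in the test configuration below.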

File tree

24 files changed: +600 -58 lines changed

acceptance/bundle/python/mutator-ordering/script

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-UV_ARGS="${UV_ARGS//\[\DATABRICKS_BUNDLES_WHEEL\]/$DATABRICKS_BUNDLES_WHEEL}"
+echo "$DATABRICKS_BUNDLES_WHEEL" > "requirements-latest.txt"

 # after mutators are applied, we expect to record location of the last mutator that had any effect

Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@
+bundle:
+  name: my_project
+
+sync: { paths: [ ] } # don't need to copy files
+
+experimental:
+  python:
+    resources:
+      - "resources:load_resources"
+    mutators:
+      - "mutators:update_pipeline"
+
+resources:
+  pipelines:
+    my_pipeline_1:
+      name: "My Pipeline 1"
Lines changed: 11 additions & 0 deletions
@@ -0,0 +1,11 @@
+from dataclasses import replace
+
+from databricks.bundles.core import pipeline_mutator
+from databricks.bundles.pipelines import Pipeline
+
+
+@pipeline_mutator
+def update_pipeline(pipeline: Pipeline) -> Pipeline:
+    assert isinstance(pipeline.name, str)
+
+    return replace(pipeline, name=f"{pipeline.name} (updated)")
Lines changed: 34 additions & 0 deletions
@@ -0,0 +1,34 @@
+
+>>> uv run [UV_ARGS] -q [CLI] bundle validate --output json
+{
+  "experimental": {
+    "python": {
+      "mutators": [
+        "mutators:update_pipeline"
+      ],
+      "resources": [
+        "resources:load_resources"
+      ]
+    }
+  },
+  "resources": {
+    "pipelines": {
+      "my_pipeline_1": {
+        "deployment": {
+          "kind": "BUNDLE",
+          "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/my_project/default/state/metadata.json"
+        },
+        "name": "My Pipeline 1 (updated)",
+        "permissions": []
+      },
+      "my_pipeline_2": {
+        "deployment": {
+          "kind": "BUNDLE",
+          "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/my_project/default/state/metadata.json"
+        },
+        "name": "My Pipeline 2 (updated)",
+        "permissions": []
+      }
+    }
+  }
+}
Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
+from databricks.bundles.core import Resources
+
+
+def load_resources() -> Resources:
+    resources = Resources()
+
+    resources.add_pipeline("my_pipeline_2", {"name": "My Pipeline 2"})
+
+    return resources
Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+echo "$DATABRICKS_BUNDLES_WHEEL" > "requirements-latest.txt"
+
+trace uv run $UV_ARGS -q $CLI bundle validate --output json | \
+  jq "pick(.experimental.python, .resources)"
+
+rm -fr .databricks __pycache__
Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
+Local = true
+Cloud = false # tests don't interact with APIs
+
+[EnvMatrix]
+UV_ARGS = [
+  # pipelines are only supported in the latest version of the wheel
+  "--with-requirements requirements-latest.txt --no-cache",
+]

acceptance/bundle/python/resolve-variable/script

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-UV_ARGS="${UV_ARGS//\[\DATABRICKS_BUNDLES_WHEEL\]/$DATABRICKS_BUNDLES_WHEEL}"
+echo "$DATABRICKS_BUNDLES_WHEEL" > "requirements-latest.txt"

 trace uv run $UV_ARGS -q $CLI bundle validate --output json | \
   jq "pick(.experimental.python, .resources)"

acceptance/bundle/python/resource-loading/script

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-UV_ARGS="${UV_ARGS//\[\DATABRICKS_BUNDLES_WHEEL\]/$DATABRICKS_BUNDLES_WHEEL}"
+echo "$DATABRICKS_BUNDLES_WHEEL" > "requirements-latest.txt"

 # each job should record location where add_job function was called

acceptance/bundle/python/restricted-execution/script

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-UV_ARGS="${UV_ARGS//\[\DATABRICKS_BUNDLES_WHEEL\]/$DATABRICKS_BUNDLES_WHEEL}"
+echo "$DATABRICKS_BUNDLES_WHEEL" > "requirements-latest.txt"

 export SOME_ENV_VAR="value_from_env"
