Skip to content

Commit 90e40d7

Browse files
committed
[Python] Support schemas resource type
1 parent 52578a5 commit 90e40d7

File tree

16 files changed

+330
-0
lines changed

16 files changed

+330
-0
lines changed
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
bundle:
  name: my_project

sync: {paths: []} # don't need to copy files

experimental:
  python:
    # Python functions that build resources for this bundle.
    resources:
      - "resources:load_resources"
    # Python functions that transform resources after they are loaded.
    mutators:
      - "mutators:update_schema"

resources:
  schemas:
    # Schema declared statically in YAML; mutators run over it too.
    my_schema_1:
      name: "My Schema"
      catalog_name: "my_catalog"
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
from dataclasses import replace
2+
3+
from databricks.bundles.core import schema_mutator
4+
from databricks.bundles.schemas import Schema
5+
6+
7+
@schema_mutator
def update_schema(schema: Schema) -> Schema:
    """Return a copy of *schema* with " (updated)" appended to its name."""
    # Guard: name could be a variable reference; this mutator expects a plain str.
    assert isinstance(schema.name, str)

    updated_name = f"{schema.name} (updated)"
    return replace(schema, name=updated_name)
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
# Run this acceptance test locally only; it doesn't interact with APIs
# (see Cloud = false).
Local = true
Cloud = false

[EnvMatrix]
# Exercise both deployment engines.
DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"]
UV_ARGS = ["--with-requirements requirements-latest.txt --no-cache"]
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
2+
>>> uv run --with-requirements requirements-latest.txt --no-cache -q [CLI] bundle validate --output json
3+
{
4+
"experimental": {
5+
"python": {
6+
"mutators": [
7+
"mutators:update_schema"
8+
],
9+
"resources": [
10+
"resources:load_resources"
11+
]
12+
}
13+
},
14+
"resources": {
15+
"schemas": {
16+
"my_schema_1": {
17+
"catalog_name": "my_catalog",
18+
"name": "My Schema (updated)"
19+
},
20+
"my_schema_2": {
21+
"catalog_name": "my_catalog_2",
22+
"name": "My Schema (2) (updated)"
23+
}
24+
}
25+
}
26+
}
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
from databricks.bundles.core import Resources
2+
3+
4+
def load_resources() -> Resources:
    """Build the programmatically-defined resources: registers schema "my_schema_2"."""
    schema_config = {
        "name": "My Schema (2)",
        "catalog_name": "my_catalog_2",
    }

    resources = Resources()
    resources.add_schema("my_schema_2", schema_config)

    return resources
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
echo "$DATABRICKS_BUNDLES_WHEEL" > "requirements-latest.txt"
2+
3+
trace uv run $UV_ARGS -q $CLI bundle validate --output json | \
4+
jq "pick(.experimental.python, .resources)"
5+
6+
rm -fr .databricks __pycache__
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
Local = true
Cloud = false # tests don't interact with APIs

[EnvMatrix]
UV_ARGS = [
    # schemas are only supported in the latest version of the wheel
    "--with-requirements requirements-latest.txt --no-cache",
]

experimental/python/codegen/codegen/packages.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
RESOURCE_NAMESPACE = {
66
"resources.Job": "jobs",
77
"resources.Pipeline": "pipelines",
8+
"resources.Schema": "schemas",
89
}
910

1011
RESOURCE_TYPES = list(RESOURCE_NAMESPACE.keys())
@@ -17,6 +18,7 @@
1718
"jobs",
1819
"pipelines",
1920
"resources",
21+
"catalog",
2022
]
2123

2224
RENAMES = {

experimental/python/databricks/bundles/core/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
"VariableOrOptional",
1515
"job_mutator",
1616
"pipeline_mutator",
17+
"schema_mutator",
1718
"load_resources_from_current_package_module",
1819
"load_resources_from_module",
1920
"load_resources_from_modules",
@@ -39,6 +40,7 @@
3940
ResourceMutator,
4041
job_mutator,
4142
pipeline_mutator,
43+
schema_mutator,
4244
)
4345
from databricks.bundles.core._resources import Resources
4446
from databricks.bundles.core._variable import (

experimental/python/databricks/bundles/core/_resource_mutator.py

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
if TYPE_CHECKING:
99
from databricks.bundles.jobs._models.job import Job
1010
from databricks.bundles.pipelines._models.pipeline import Pipeline
11+
from databricks.bundles.schemas._models.schema import Schema
1112

1213
_T = TypeVar("_T", bound=Resource)
1314

@@ -127,3 +128,35 @@ def my_pipeline_mutator(bundle: Bundle, pipeline: Pipeline) -> Pipeline:
127128
from databricks.bundles.pipelines._models.pipeline import Pipeline
128129

129130
return ResourceMutator(resource_type=Pipeline, function=function)
131+
132+
133+
@overload
def schema_mutator(
    function: Callable[[Bundle, "Schema"], "Schema"],
) -> ResourceMutator["Schema"]: ...


@overload
def schema_mutator(
    function: Callable[["Schema"], "Schema"],
) -> ResourceMutator["Schema"]: ...


def schema_mutator(function: Callable) -> ResourceMutator["Schema"]:
    """
    Decorator for defining a schema mutator. The decorated function should
    return a new schema instance with the desired changes rather than
    mutating the schema it receives.

    Example:

    .. code-block:: python

        @schema_mutator
        def my_schema_mutator(bundle: Bundle, schema: Schema) -> Schema:
            return replace(schema, name="my_schema")

    :param function: Function that mutates a schema.
    """
    # Local import, matching job_mutator/pipeline_mutator: at module level
    # Schema is only imported under TYPE_CHECKING.
    from databricks.bundles.schemas._models.schema import Schema

    mutator = ResourceMutator(resource_type=Schema, function=function)
    return mutator

0 commit comments

Comments
 (0)