Skip to content

Commit 7eb092f

Browse files
committed
[Python] Add volumes support
1 parent 0a7bd1a commit 7eb092f

File tree

17 files changed

+373
-1
lines changed

17 files changed

+373
-1
lines changed
Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
bundle:
  name: my_project

sync: {paths: []} # don't need to copy files

experimental:
  python:
    resources:
      - "resources:load_resources"
    mutators:
      - "mutators:update_volume"

resources:
  volumes:
    my_volume_1:
      name: "My Volume"
      catalog_name: "my_catalog"
      schema_name: "my_schema"
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
from dataclasses import replace
2+
3+
from databricks.bundles.core import volume_mutator
4+
from databricks.bundles.catalog import Volume
5+
6+
7+
@volume_mutator
def update_volume(volume: Volume) -> Volume:
    """Append an " (updated)" suffix to the volume's name.

    The name is typed as ``VariableOr[str]``; the assert narrows it to a
    plain ``str`` before formatting.
    """
    current_name = volume.name
    assert isinstance(current_name, str)

    return replace(volume, name=f"{current_name} (updated)")
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
Local = true
Cloud = false

[EnvMatrix]
DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"]
UV_ARGS = ["--with-requirements requirements-latest.txt --no-cache"]
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
>>> uv run --with-requirements requirements-latest.txt --no-cache -q [CLI] bundle validate --output json
{
  "experimental": {
    "python": {
      "mutators": [
        "mutators:update_volume"
      ],
      "resources": [
        "resources:load_resources"
      ]
    }
  },
  "resources": {
    "volumes": {
      "my_volume_1": {
        "catalog_name": "my_catalog",
        "name": "My Volume (updated)",
        "schema_name": "my_schema",
        "volume_type": "MANAGED"
      },
      "my_volume_2": {
        "catalog_name": "my_catalog_2",
        "name": "My Volume (2) (updated)",
        "schema_name": "my_schema_2",
        "volume_type": "MANAGED"
      }
    }
  }
}
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
from databricks.bundles.core import Resources
2+
3+
4+
def load_resources() -> Resources:
    """Return programmatically-defined bundle resources.

    Registers a single volume, ``my_volume_2``, from a plain dict config.
    """
    loaded = Resources()

    volume_config = {
        "name": "My Volume (2)",
        "catalog_name": "my_catalog_2",
        "schema_name": "my_schema_2",
    }
    loaded.add_volume("my_volume_2", volume_config)

    return loaded
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
# Pin the test to the locally-built wheel supplied by the harness.
echo "$DATABRICKS_BUNDLES_WHEEL" > "requirements-latest.txt"

# Validate the bundle and show only the sections this test cares about.
trace uv run $UV_ARGS -q $CLI bundle validate --output json | \
    jq "pick(.experimental.python, .resources)"

# Clean up generated state so reruns start fresh.
rm -fr .databricks __pycache__
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
Local = true
Cloud = false # tests don't interact with APIs

[EnvMatrix]
UV_ARGS = [
    # volumes are only supported in the latest version of the wheel
    "--with-requirements requirements-latest.txt --no-cache",
]

experimental/python/codegen/codegen/packages.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,16 +4,19 @@
# Maps a resource type (as named in the OpenAPI spec's "resources" namespace)
# to the Python sub-package that hosts its generated model classes.
RESOURCE_NAMESPACE_OVERRIDE = {
    "resources.Job": "jobs",
    "resources.Pipeline": "pipelines",
    "resources.Volume": "catalog",
    "resources.JobPermission": "jobs",
    "resources.JobPermissionLevel": "jobs",
    "resources.PipelinePermission": "pipelines",
    "resources.PipelinePermissionLevel": "pipelines",
    "resources.Grant": "catalog",
}
1214

# All supported resource types
RESOURCE_TYPES = [
    "resources.Job",
    "resources.Pipeline",
    "resources.Volume",
]
1821

1922
# Namespaces to load from OpenAPI spec.
@@ -24,6 +27,7 @@
2427
"jobs",
2528
"pipelines",
2629
"resources",
30+
"catalog",
2731
]
2832

2933
RENAMES = {
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
"""Public catalog resource models: volumes, volume types, and grants."""

from databricks.bundles.catalog._models.grant import Grant, GrantDict, GrantParam
from databricks.bundles.catalog._models.volume import Volume, VolumeDict, VolumeParam
from databricks.bundles.catalog._models.volume_type import VolumeType, VolumeTypeParam

__all__ = [
    "Grant",
    "GrantDict",
    "GrantParam",
    "Volume",
    "VolumeDict",
    "VolumeParam",
    "VolumeType",
    "VolumeTypeParam",
]
Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
from dataclasses import dataclass, field
2+
from typing import TYPE_CHECKING, TypedDict
3+
4+
from databricks.bundles.core._transform import _transform
5+
from databricks.bundles.core._transform_to_json import _transform_to_json_value
6+
from databricks.bundles.core._variable import VariableOr, VariableOrList
7+
8+
if TYPE_CHECKING:
9+
from typing_extensions import Self
10+
11+
12+
@dataclass(kw_only=True)
class Grant:
    """A grant of privileges to a single principal."""

    principal: VariableOr[str]
    """
    The name of the principal that will be granted privileges
    """

    privileges: VariableOrList[str] = field(default_factory=list)
    """
    The privileges to grant to the specified entity
    """

    @classmethod
    def from_dict(cls, value: "GrantDict") -> "Self":
        """Build a Grant from its dict form (see :class:`GrantDict`)."""
        return _transform(cls, value)

    def as_dict(self) -> "GrantDict":
        """Convert this Grant back into a JSON-compatible dict."""
        return _transform_to_json_value(self)  # type:ignore
32+
33+
34+
class GrantDict(TypedDict, total=False):
    """Dict representation of :class:`Grant`; all keys are optional."""

    principal: VariableOr[str]
    """
    The name of the principal that will be granted privileges
    """

    privileges: VariableOrList[str]
    """
    The privileges to grant to the specified entity
    """
46+
# Anything accepted where a grant is expected: either the typed dict
# or the dataclass instance.
GrantParam = GrantDict | Grant

0 commit comments

Comments
 (0)