Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions bundle/internal/schema/annotations_openapi_overrides.yml
Original file line number Diff line number Diff line change
Expand Up @@ -803,6 +803,11 @@ github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo:
"abfss":
"description": |-
Contains the Azure Data Lake Storage destination path
github.com/databricks/databricks-sdk-go/service/compute.Kind:
"_":
"enum":
- |-
CLASSIC_PREVIEW
github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo:
"log_analytics_primary_key":
"description": |-
Expand Down
13 changes: 12 additions & 1 deletion bundle/schema/jsonschema.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 0 additions & 4 deletions experimental/python/codegen/codegen/jsonschema_patch.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,6 @@
from codegen.jsonschema import Schema

REMOVED_FIELDS = {
"compute.ClusterSpec": {
# doesn't work, openapi schema needs to be updated to be enum
"kind",
},
# fields that were deprecated a long time ago
"resources.Pipeline": {
# 'trigger' is deprecated, use 'continuous' or schedule pipeline refresh using job instead
Expand Down
4 changes: 0 additions & 4 deletions experimental/python/codegen/codegen/packages.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,10 +57,6 @@ def is_resource(ref: str) -> bool:
def should_load_ref(ref: str) -> bool:
name = ref.split("/")[-1]

# FIXME doesn't work, looks like enum, but doesn't have any values specified
if name == "compute.Kind":
return False

for namespace in LOADED_NAMESPACES:
if name.startswith(f"{namespace}."):
return True
Expand Down
3 changes: 3 additions & 0 deletions experimental/python/databricks/bundles/jobs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,8 @@
"JobsHealthRules",
"JobsHealthRulesDict",
"JobsHealthRulesParam",
"Kind",
"KindParam",
"Library",
"LibraryDict",
"LibraryParam",
Expand Down Expand Up @@ -484,6 +486,7 @@
JobsHealthRulesDict,
JobsHealthRulesParam,
)
from databricks.bundles.jobs._models.kind import Kind, KindParam
from databricks.bundles.jobs._models.library import Library, LibraryDict, LibraryParam
from databricks.bundles.jobs._models.lifecycle import (
Lifecycle,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
InitScriptInfo,
InitScriptInfoParam,
)
from databricks.bundles.jobs._models.kind import Kind, KindParam
from databricks.bundles.jobs._models.runtime_engine import (
RuntimeEngine,
RuntimeEngineParam,
Expand Down Expand Up @@ -171,6 +172,8 @@ class ClusterSpec:
When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`
"""

kind: VariableOrOptional[Kind] = None

node_type_id: VariableOrOptional[str] = None
"""
This field encodes, through a single value, the resources available to each of
Expand Down Expand Up @@ -384,6 +387,8 @@ class ClusterSpecDict(TypedDict, total=False):
When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`
"""

kind: VariableOrOptional[KindParam]

node_type_id: VariableOrOptional[str]
"""
This field encodes, through a single value, the resources available to each of
Expand Down
9 changes: 9 additions & 0 deletions experimental/python/databricks/bundles/jobs/_models/kind.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from enum import Enum
from typing import Literal


class Kind(Enum):
    """Kind of compute described by a cluster spec.

    Mirrors ``compute.Kind`` from the Databricks OpenAPI schema; currently
    the only published value is ``CLASSIC_PREVIEW``.
    """

    CLASSIC_PREVIEW = "CLASSIC_PREVIEW"


# Param alias accepted wherever a Kind is expected: callers may pass either
# the enum member or its literal string form (the codegen convention used by
# the other *Param aliases in this package).
KindParam = Literal["CLASSIC_PREVIEW"] | Kind
Loading