Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
Original file line number Diff line number Diff line change
@@ -1 +1 @@
44cbc832f1b070c47544ff470fd8498853d24cf3
f21f4933da405cac4bc77c9732044dc45b4f0c5a
3 changes: 3 additions & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,6 @@
### Internal Changes

### API Changes
* Add `table_deltasharing_open_dir_based` enum value for `databricks.sdk.service.catalog.SecurableKind`.
* Add `creating` and `create_failed` enum values for `databricks.sdk.service.settings.NccPrivateEndpointRulePrivateLinkConnectionState`.
* [Breaking] Remove `access_modes` and `storage_location` fields for `databricks.sdk.service.sharing.Table`.
3 changes: 2 additions & 1 deletion databricks/sdk/service/catalog.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 3 additions & 1 deletion databricks/sdk/service/postgres.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions databricks/sdk/service/settings.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

17 changes: 0 additions & 17 deletions databricks/sdk/service/sharing.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 3 additions & 3 deletions docs/account/provisioning/credentials.rst
Original file line number Diff line number Diff line change
Expand Up @@ -24,15 +24,15 @@

a = AccountClient()

role = a.credentials.create(
creds = a.credentials.create(
credentials_name=f"sdk-{time.time_ns()}",
aws_credentials=provisioning.CreateCredentialAwsCredentials(
sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
),
)

# cleanup
a.credentials.delete(credentials_id=role.credentials_id)
a.credentials.delete(credentials_id=creds.credentials_id)

Creates a Databricks credential configuration that represents cloud cross-account credentials for a
specified account. Databricks uses this to set up network infrastructure properly to host Databricks
Expand Down
5 changes: 4 additions & 1 deletion docs/account/provisioning/storage.rst
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,13 @@

a = AccountClient()

storage = a.storage.create(
bucket = a.storage.create(
storage_configuration_name=f"sdk-{time.time_ns()}",
root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
)

# cleanup
a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)

Creates a Databricks storage configuration for an account.

Expand Down
5 changes: 4 additions & 1 deletion docs/dbdataclasses/catalog.rst
Original file line number Diff line number Diff line change
Expand Up @@ -1497,7 +1497,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo

.. py:class:: SecurableKind

Latest kind: CONNECTION_ONELAKE = 289; Next id:290
Latest kind: TABLE_DELTASHARING_OPEN_DIR_BASED = 290; Next id:291

.. py:attribute:: TABLE_DB_STORAGE
:value: "TABLE_DB_STORAGE"
Expand All @@ -1511,6 +1511,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
.. py:attribute:: TABLE_DELTASHARING_MUTABLE
:value: "TABLE_DELTASHARING_MUTABLE"

.. py:attribute:: TABLE_DELTASHARING_OPEN_DIR_BASED
:value: "TABLE_DELTASHARING_OPEN_DIR_BASED"

.. py:attribute:: TABLE_DELTA_EXTERNAL
:value: "TABLE_DELTA_EXTERNAL"

Expand Down
6 changes: 6 additions & 0 deletions docs/dbdataclasses/settings.rst
Original file line number Diff line number Diff line change
Expand Up @@ -653,6 +653,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo

.. py:class:: NccPrivateEndpointRulePrivateLinkConnectionState

.. py:attribute:: CREATE_FAILED
:value: "CREATE_FAILED"

.. py:attribute:: CREATING
:value: "CREATING"

.. py:attribute:: DISCONNECTED
:value: "DISCONNECTED"

Expand Down
7 changes: 3 additions & 4 deletions docs/workspace/catalog/catalogs.rst
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,10 @@

w = WorkspaceClient()

new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")

# cleanup
w.catalogs.delete(name=new_catalog.name, force=True)
w.catalogs.delete(name=created_catalog.name, force=True)

Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
**CREATE_CATALOG** privilege.
Expand Down Expand Up @@ -155,13 +155,12 @@
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

created = w.catalogs.create(name=f"sdk-{time.time_ns()}")

_ = w.catalogs.update(name=created.name, isolation_mode=catalog.CatalogIsolationMode.ISOLATED)
_ = w.catalogs.update(name=created.name, comment="updated")

# cleanup
w.catalogs.delete(name=created.name, force=True)
Expand Down
17 changes: 8 additions & 9 deletions docs/workspace/catalog/external_locations.rst
Original file line number Diff line number Diff line change
Expand Up @@ -30,22 +30,20 @@

w = WorkspaceClient()

storage_credential = w.storage_credentials.create(
credential = w.storage_credentials.create(
name=f"sdk-{time.time_ns()}",
aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
comment="created via SDK",
)

external_location = w.external_locations.create(
created = w.external_locations.create(
name=f"sdk-{time.time_ns()}",
credential_name=storage_credential.name,
comment="created via SDK",
url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
credential_name=credential.name,
url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
)

# cleanup
w.storage_credentials.delete(name=storage_credential.name)
w.external_locations.delete(name=external_location.name)
w.storage_credentials.delete(name=credential.name)
w.external_locations.delete(name=created.name)

Creates a new external location entry in the metastore. The caller must be a metastore admin or have
the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
Expand Down Expand Up @@ -142,10 +140,11 @@
.. code-block::

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

all = w.external_locations.list()
all = w.external_locations.list(catalog.ListExternalLocationsRequest())

Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller
must be a metastore admin, the owner of the external location, or a user that has some privilege on
Expand Down
8 changes: 4 additions & 4 deletions docs/workspace/catalog/schemas.rst
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,13 @@

w = WorkspaceClient()

new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")

created = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=new_catalog.name)
created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)

# cleanup
w.catalogs.delete(name=new_catalog.name, force=True)
w.schemas.delete(full_name=created.full_name)
w.catalogs.delete(name=created_catalog.name, force=True)
w.schemas.delete(full_name=created_schema.full_name)

Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or have the
**CREATE_SCHEMA** privilege in the parent catalog.
Expand Down
11 changes: 5 additions & 6 deletions docs/workspace/catalog/storage_credentials.rst
Original file line number Diff line number Diff line change
Expand Up @@ -30,14 +30,13 @@

w = WorkspaceClient()

storage_credential = w.storage_credentials.create(
credential = w.storage_credentials.create(
name=f"sdk-{time.time_ns()}",
aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
comment="created via SDK",
)

# cleanup
w.storage_credentials.delete(name=storage_credential.name)
w.storage_credentials.delete(name=credential.name)

Creates a new storage credential.

Expand Down Expand Up @@ -99,13 +98,13 @@

created = w.storage_credentials.create(
name=f"sdk-{time.time_ns()}",
aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
)

by_name = w.storage_credentials.get(get=created.name)
by_name = w.storage_credentials.get(name=created.name)

# cleanup
w.storage_credentials.delete(delete=created.name)
w.storage_credentials.delete(name=created.name)

Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
storage credential, or have some permission on the storage credential.
Expand Down
2 changes: 1 addition & 1 deletion docs/workspace/catalog/tables.rst
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@

created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)

all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name)
summaries = w.tables.list_summaries(catalog_name=created_catalog.name, schema_name_pattern=created_schema.name)

# cleanup
w.schemas.delete(full_name=created_schema.full_name)
Expand Down
3 changes: 1 addition & 2 deletions docs/workspace/compute/clusters.rst
Original file line number Diff line number Diff line change
Expand Up @@ -645,11 +645,10 @@
.. code-block::

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()

all = w.clusters.list(compute.ListClustersRequest())
nodes = w.clusters.list_node_types()

Return information about all pinned and active clusters, and all clusters terminated within the last
30 days. Clusters terminated prior to this period are not included.
Expand Down
2 changes: 1 addition & 1 deletion docs/workspace/iam/permissions.rst
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@

obj = w.workspace.get_status(path=notebook_path)

_ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))

Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
object.
Expand Down
11 changes: 4 additions & 7 deletions docs/workspace/ml/model_registry.rst
Original file line number Diff line number Diff line change
Expand Up @@ -91,8 +91,6 @@
w = WorkspaceClient()

model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")

created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")

Creates a new registered model with the name specified in the request body. Throws
`RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
Expand Down Expand Up @@ -736,14 +734,13 @@

w = WorkspaceClient()

model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")

created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
model = w.model_registry.get_model(name=created.registered_model.name)

w.model_registry.update_model_version(
w.model_registry.update_model(
name=model.registered_model_databricks.name,
description=f"sdk-{time.time_ns()}",
name=created.model_version.name,
version=created.model_version.version,
)

Updates a registered model.
Expand Down
2 changes: 1 addition & 1 deletion docs/workspace/sql/queries.rst
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
display_name=f"sdk-{time.time_ns()}",
warehouse_id=srcs[0].warehouse_id,
description="test query from Python SDK",
query_text="SHOW TABLES",
query_text="SELECT 1",
)
)

Expand Down
10 changes: 6 additions & 4 deletions docs/workspace/workspace/workspace.rst
Original file line number Diff line number Diff line change
Expand Up @@ -232,14 +232,16 @@

.. code-block::

import os
import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

names = []
for i in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True):
names.append(i.path)
assert len(names) > 0
notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

objects = w.workspace.list(path=os.path.dirname(notebook))

List workspace objects

Expand Down
Loading