diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 7534b681b..6fba04481 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-c922d5ef891f20c4739edc2fe41f928475414ee1
\ No newline at end of file
+328f83d70fc606f9d544f7e1d81a04d2473f6dec
\ No newline at end of file
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index 2905c3103..1cfb8e87d 100644
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -1750,7 +1750,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo:
 
 
 class ConnectionType(Enum):
-    """Next Id: 52"""
+    """Next Id: 53"""
 
     BIGQUERY = "BIGQUERY"
     DATABRICKS = "DATABRICKS"
@@ -8766,7 +8766,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Securable:
 
 
 class SecurableKind(Enum):
-    """Latest kind: CONNECTION_TIKTOK_ADS_U2M = 285; Next id: 286"""
+    """Latest kind: CONNECTION_ONELAKE = 289; Next id:290"""
 
     TABLE_DB_STORAGE = "TABLE_DB_STORAGE"
     TABLE_DELTA = "TABLE_DELTA"
diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst
index ca78b86df..2a8043172 100644
--- a/docs/account/iam/workspace_assignment.rst
+++ b/docs/account/iam/workspace_assignment.rst
@@ -43,9 +43,9 @@
 
             a = AccountClient()
 
-            workspace_id = os.environ["TEST_WORKSPACE_ID"]
+            workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
 
-            all = a.workspace_assignment.list(list=workspace_id)
+            all = a.workspace_assignment.list(workspace_id=workspace_id)
 
         Get the permission assignments for the specified Databricks account and Databricks workspace.
diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst
index b9f080e36..25ee5abaa 100644
--- a/docs/account/provisioning/storage.rst
+++ b/docs/account/provisioning/storage.rst
@@ -16,7 +16,6 @@
 
         .. code-block::
 
-            import os
             import time
 
             from databricks.sdk import AccountClient
@@ -26,11 +25,8 @@
 
             storage = a.storage.create(
                 storage_configuration_name=f"sdk-{time.time_ns()}",
-                root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]),
+                root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
             )
-
-            # cleanup
-            a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
 
         Creates a Databricks storage configuration for an account.
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index 227b9599d..5886e637c 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -279,7 +279,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ConnectionType
 
-   Next Id: 52
+   Next Id: 53
 
    .. py:attribute:: BIGQUERY
       :value: "BIGQUERY"
@@ -1497,7 +1497,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: SecurableKind
 
-   Latest kind: CONNECTION_TIKTOK_ADS_U2M = 285; Next id: 286
+   Latest kind: CONNECTION_ONELAKE = 289; Next id:290
 
    .. py:attribute:: TABLE_DB_STORAGE
       :value: "TABLE_DB_STORAGE"
diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst
index 62dfa5e3c..0b4d11aaa 100644
--- a/docs/workspace/catalog/catalogs.rst
+++ b/docs/workspace/catalog/catalogs.rst
@@ -24,10 +24,10 @@
 
             w = WorkspaceClient()
 
-            created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+            new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
             # cleanup
-            w.catalogs.delete(name=created.name, force=True)
+            w.catalogs.delete(name=new_catalog.name, force=True)
 
         Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
         **CREATE_CATALOG** privilege.
@@ -155,13 +155,12 @@
 
            import time
 
            from databricks.sdk import WorkspaceClient
-           from databricks.sdk.service import catalog
 
            w = WorkspaceClient()
 
            created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-           _ = w.catalogs.update(name=created.name, isolation_mode=catalog.CatalogIsolationMode.ISOLATED)
+           _ = w.catalogs.update(name=created.name, comment="updated")
 
            # cleanup
            w.catalogs.delete(name=created.name, force=True)
diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst
index 56ce8ce77..045ab59e6 100644
--- a/docs/workspace/catalog/external_locations.rst
+++ b/docs/workspace/catalog/external_locations.rst
@@ -30,22 +30,20 @@
 
             w = WorkspaceClient()
 
-            storage_credential = w.storage_credentials.create(
+            credential = w.storage_credentials.create(
                 name=f"sdk-{time.time_ns()}",
-                aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
-                comment="created via SDK",
+                aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
             )
 
-            external_location = w.external_locations.create(
+            created = w.external_locations.create(
                 name=f"sdk-{time.time_ns()}",
-                credential_name=storage_credential.name,
-                comment="created via SDK",
-                url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
+                credential_name=credential.name,
+                url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
             )
 
             # cleanup
-            w.storage_credentials.delete(name=storage_credential.name)
-            w.external_locations.delete(name=external_location.name)
+            w.storage_credentials.delete(delete=credential.name)
+            w.external_locations.delete(delete=created.name)
 
         Creates a new external location entry in the metastore. The caller must be a metastore admin or
         have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
@@ -107,20 +105,20 @@
 
            credential = w.storage_credentials.create(
                name=f"sdk-{time.time_ns()}",
-               aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+               aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
            )
 
            created = w.external_locations.create(
                name=f"sdk-{time.time_ns()}",
                credential_name=credential.name,
-               url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+               url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
            )
 
-           _ = w.external_locations.get(name=created.name)
+           _ = w.external_locations.get(get=created.name)
 
            # cleanup
-           w.storage_credentials.delete(name=credential.name)
-           w.external_locations.delete(name=created.name)
+           w.storage_credentials.delete(delete=credential.name)
+           w.external_locations.delete(delete=created.name)
 
        Gets an external location from the metastore. The caller must be either a metastore admin, the owner
        of the external location, or a user that has some privilege on the external location.
diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst
index 719d5a156..fd1479c78 100644
--- a/docs/workspace/catalog/schemas.rst
+++ b/docs/workspace/catalog/schemas.rst
@@ -22,13 +22,13 @@
 
             w = WorkspaceClient()
 
-            created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+            new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-            created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
+            created = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=new_catalog.name)
 
             # cleanup
-            w.catalogs.delete(name=created_catalog.name, force=True)
-            w.schemas.delete(full_name=created_schema.full_name)
+            w.catalogs.delete(name=new_catalog.name, force=True)
+            w.schemas.delete(full_name=created.full_name)
 
         Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or have the
         **CREATE_SCHEMA** privilege in the parent catalog.
diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst
index df7d2bf2b..fda14984e 100644
--- a/docs/workspace/catalog/storage_credentials.rst
+++ b/docs/workspace/catalog/storage_credentials.rst
@@ -30,14 +30,13 @@
 
             w = WorkspaceClient()
 
-            storage_credential = w.storage_credentials.create(
+            credential = w.storage_credentials.create(
                 name=f"sdk-{time.time_ns()}",
-                aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
-                comment="created via SDK",
+                aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
             )
 
             # cleanup
-            w.storage_credentials.delete(name=storage_credential.name)
+            w.storage_credentials.delete(delete=credential.name)
 
         Creates a new storage credential.
@@ -124,11 +123,10 @@
 
         .. code-block::
 
            from databricks.sdk import WorkspaceClient
-           from databricks.sdk.service import catalog
 
            w = WorkspaceClient()
 
-           all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest())
+           all = w.storage_credentials.list()
 
        Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to
        only those storage credentials the caller has permission to access. If the caller is a metastore
diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst
index 463e34d0a..8619a5e9a 100644
--- a/docs/workspace/compute/clusters.rst
+++ b/docs/workspace/compute/clusters.rst
@@ -645,10 +645,11 @@
 
         .. code-block::
 
            from databricks.sdk import WorkspaceClient
+           from databricks.sdk.service import compute
 
            w = WorkspaceClient()
 
-           nodes = w.clusters.list_node_types()
+           all = w.clusters.list(compute.ListClustersRequest())
 
        Return information about all pinned and active clusters, and all clusters terminated within the last
        30 days. Clusters terminated prior to this period are not included.
diff --git a/docs/workspace/iam/current_user.rst b/docs/workspace/iam/current_user.rst
index b2390ce63..2f95213e2 100644
--- a/docs/workspace/iam/current_user.rst
+++ b/docs/workspace/iam/current_user.rst
@@ -17,7 +17,7 @@
 
             w = WorkspaceClient()
 
-            me = w.current_user.me()
+            me2 = w.current_user.me()
 
         Get details about the current method caller's identity.
diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst
index ea24afd1a..15524c53e 100644
--- a/docs/workspace/iam/permissions.rst
+++ b/docs/workspace/iam/permissions.rst
@@ -44,7 +44,7 @@
 
             obj = w.workspace.get_status(path=notebook_path)
 
-            _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
+            levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
 
         Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
        object.
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst
index b46d2f47c..3b4d11217 100644
--- a/docs/workspace/ml/model_registry.rst
+++ b/docs/workspace/ml/model_registry.rst
@@ -90,7 +90,9 @@
 
             w = WorkspaceClient()
 
-            created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
+            model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
+
+            mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
 
         Creates a new registered model with the name specified in the request body. Throws
        `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
@@ -120,7 +122,7 @@
 
            model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
-           created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
+           mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
 
        Creates a model version.
diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst
index fd81e1b24..1a7c88de9 100644
--- a/docs/workspace/sharing/providers.rst
+++ b/docs/workspace/sharing/providers.rst
@@ -101,12 +101,25 @@
 
         .. code-block::
 
+            import time
+
             from databricks.sdk import WorkspaceClient
-            from databricks.sdk.service import sharing
 
             w = WorkspaceClient()
 
-            all = w.providers.list(sharing.ListProvidersRequest())
+            public_share_recipient = """{
+                "shareCredentialsVersion":1,
+                "bearerToken":"dapiabcdefghijklmonpqrstuvwxyz",
+                "endpoint":"https://sharing.delta.io/delta-sharing/"
+            }
+            """
+
+            created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient)
+
+            shares = w.providers.list_shares(name=created.name)
+
+            # cleanup
+            w.providers.delete(name=created.name)
 
        Gets an array of available authentication providers. The caller must either be a metastore admin or
        the owner of the providers. Providers not owned by the caller are not included in the response. There
diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst
index a7d547fbf..7303aa39d 100644
--- a/docs/workspace/workspace/workspace.rst
+++ b/docs/workspace/workspace/workspace.rst
@@ -79,7 +79,7 @@
 
             notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-            export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook)
+            export_response = w.workspace.export_(format=workspace.ExportFormat.SOURCE, path=notebook)
 
         Exports an object or the contents of an entire directory.
@@ -180,7 +180,7 @@
 
            w.workspace.import_(
                path=notebook_path,
-               overwrite=true_,
+               overwrite=True,
                format=workspace.ImportFormat.SOURCE,
                language=workspace.Language.PYTHON,
                content=base64.b64encode(
@@ -232,14 +232,16 @@
 
         .. code-block::
 
+            import os
+            import time
+
             from databricks.sdk import WorkspaceClient
 
             w = WorkspaceClient()
 
-            names = []
-            for i in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True):
-                names.append(i.path)
-            assert len(names) > 0
+            notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+
+            objects = w.workspace.list(path=os.path.dirname(notebook))
 
         List workspace objects
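
The regenerated examples above also change a few call signatures: a.workspace_assignment.list now takes workspace_id= instead of list=, and w.storage_credentials.list() no longer needs a request object. A minimal sketch exercising the updated calls, assuming a configured account and workspace profile plus a DUMMY_WORKSPACE_ID environment variable (the auth setup is an assumption and not part of this diff):

.. code-block::

    import os

    from databricks.sdk import AccountClient, WorkspaceClient

    a = AccountClient()
    w = WorkspaceClient()

    # the list call now takes workspace_id= instead of list=
    assignments = a.workspace_assignment.list(workspace_id=os.environ["DUMMY_WORKSPACE_ID"])

    # list() no longer takes a ListStorageCredentialsRequest argument
    credentials = w.storage_credentials.list()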