diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 528b0ddb0..b5ddb9b7a 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-44cbc832f1b070c47544ff470fd8498853d24cf3
\ No newline at end of file
+f21f4933da405cac4bc77c9732044dc45b4f0c5a
\ No newline at end of file
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index bd7680094..02822d4ea 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -13,3 +13,6 @@
 ### Internal Changes
 
 ### API Changes
+* Add `table_deltasharing_open_dir_based` enum value for `databricks.sdk.service.catalog.SecurableKind`.
+* Add `creating` and `create_failed` enum values for `databricks.sdk.service.settings.NccPrivateEndpointRulePrivateLinkConnectionState`.
+* [Breaking] Remove `access_modes` and `storage_location` fields for `databricks.sdk.service.sharing.Table`.
\ No newline at end of file
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index c5ad98ef0..15c1d9ba1 100644
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -8784,12 +8784,13 @@ def from_dict(cls, d: Dict[str, Any]) -> Securable:
 
 
 class SecurableKind(Enum):
-    """Latest kind: CONNECTION_ONELAKE = 289; Next id:290"""
+    """Latest kind: TABLE_DELTASHARING_OPEN_DIR_BASED = 290; Next id:291"""
 
     TABLE_DB_STORAGE = "TABLE_DB_STORAGE"
     TABLE_DELTA = "TABLE_DELTA"
     TABLE_DELTASHARING = "TABLE_DELTASHARING"
     TABLE_DELTASHARING_MUTABLE = "TABLE_DELTASHARING_MUTABLE"
+    TABLE_DELTASHARING_OPEN_DIR_BASED = "TABLE_DELTASHARING_OPEN_DIR_BASED"
     TABLE_DELTA_EXTERNAL = "TABLE_DELTA_EXTERNAL"
     TABLE_DELTA_ICEBERG_DELTASHARING = "TABLE_DELTA_ICEBERG_DELTASHARING"
     TABLE_DELTA_ICEBERG_MANAGED = "TABLE_DELTA_ICEBERG_MANAGED"
diff --git a/databricks/sdk/service/postgres.py b/databricks/sdk/service/postgres.py
index a01e1c7e3..435d69b74 100644
--- a/databricks/sdk/service/postgres.py
+++ b/databricks/sdk/service/postgres.py
@@ -1381,7 +1381,9 @@ class RoleRoleSpec:
     instead for the GROUP identity type."""
 
     identity_type: Optional[RoleIdentityType] = None
-    """The type of the role."""
+    """The type of the role. When specifying a managed-identity, the chosen role_id must be a valid:
+
+    * application ID for SERVICE_PRINCIPAL * user email for USER * group name for GROUP"""
 
     def as_dict(self) -> dict:
         """Serializes the RoleRoleSpec into a dictionary suitable for use as a JSON request body."""
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index d3b01fc83..ebae1d89f 100644
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -4183,6 +4183,8 @@ def from_dict(cls, d: Dict[str, Any]) -> NccPrivateEndpointRule:
 
 
 class NccPrivateEndpointRulePrivateLinkConnectionState(Enum):
+    CREATE_FAILED = "CREATE_FAILED"
+    CREATING = "CREATING"
     DISCONNECTED = "DISCONNECTED"
     ESTABLISHED = "ESTABLISHED"
     EXPIRED = "EXPIRED"
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index 5b731be84..6074f1428 100644
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -2227,10 +2227,6 @@ class SharedSecurableKind(Enum):
 
 @dataclass
 class Table:
-    access_modes: Optional[List[str]] = None
-    """The access modes supported for this table (e.g., "url", "dir"). Used for open sharing to
-    indicate how the table can be accessed."""
-
     comment: Optional[str] = None
     """The comment of the table."""
 
@@ -2258,17 +2254,12 @@ class Table:
     share_id: Optional[str] = None
     """The id of the share that the table belongs to."""
 
-    storage_location: Optional[str] = None
-    """The cloud storage location of the table for open sharing."""
-
     tags: Optional[List[catalog.TagKeyValue]] = None
     """The Tags of the table."""
 
     def as_dict(self) -> dict:
         """Serializes the Table into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_modes:
-            body["access_modes"] = [v for v in self.access_modes]
         if self.comment is not None:
             body["comment"] = self.comment
         if self.id is not None:
@@ -2287,8 +2278,6 @@ def as_dict(self) -> dict:
             body["share"] = self.share
         if self.share_id is not None:
             body["share_id"] = self.share_id
-        if self.storage_location is not None:
-            body["storage_location"] = self.storage_location
         if self.tags:
             body["tags"] = [v.as_dict() for v in self.tags]
         return body
@@ -2296,8 +2285,6 @@ def as_dict(self) -> dict:
     def as_shallow_dict(self) -> dict:
         """Serializes the Table into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_modes:
-            body["access_modes"] = self.access_modes
         if self.comment is not None:
             body["comment"] = self.comment
@@ -2316,8 +2303,6 @@ def as_shallow_dict(self) -> dict:
             body["share"] = self.share
         if self.share_id is not None:
             body["share_id"] = self.share_id
-        if self.storage_location is not None:
-            body["storage_location"] = self.storage_location
         if self.tags:
             body["tags"] = self.tags
         return body
@@ -2326,7 +2311,6 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> Table:
         """Deserializes the Table from a dictionary."""
         return cls(
-            access_modes=d.get("access_modes", None),
             comment=d.get("comment", None),
             id=d.get("id", None),
             internal_attributes=_from_dict(d, "internal_attributes", TableInternalAttributes),
@@ -2336,7 +2320,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Table:
             schema=d.get("schema", None),
             share=d.get("share", None),
             share_id=d.get("share_id", None),
-            storage_location=d.get("storage_location", None),
             tags=_repeated_dict(d, "tags", catalog.TagKeyValue),
         )
 
diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst
index d63648d58..b71c1707e 100644
--- a/docs/account/provisioning/credentials.rst
+++ b/docs/account/provisioning/credentials.rst
@@ -24,15 +24,15 @@
 
             a = AccountClient()
 
-            role = a.credentials.create(
+            creds = a.credentials.create(
                 credentials_name=f"sdk-{time.time_ns()}",
                 aws_credentials=provisioning.CreateCredentialAwsCredentials(
-                    sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
+                    sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
                ),
            )
 
             # cleanup
-            a.credentials.delete(credentials_id=role.credentials_id)
+            a.credentials.delete(credentials_id=creds.credentials_id)
 
         Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account.
         Databricks uses this to set up network infrastructure properly to host Databricks
diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst
index 25ee5abaa..41a04deb3 100644
--- a/docs/account/provisioning/storage.rst
+++ b/docs/account/provisioning/storage.rst
@@ -23,10 +23,13 @@
 
             a = AccountClient()
 
-            storage = a.storage.create(
+            bucket = a.storage.create(
                 storage_configuration_name=f"sdk-{time.time_ns()}",
                 root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
             )
+
+            # cleanup
+            a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)
 
         Creates a Databricks storage configuration for an account.
 
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index 5886e637c..a0435d055 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -1497,7 +1497,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: SecurableKind
 
-   Latest kind: CONNECTION_ONELAKE = 289; Next id:290
+   Latest kind: TABLE_DELTASHARING_OPEN_DIR_BASED = 290; Next id:291
 
    .. py:attribute:: TABLE_DB_STORAGE
       :value: "TABLE_DB_STORAGE"
@@ -1511,6 +1511,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: TABLE_DELTASHARING_MUTABLE
       :value: "TABLE_DELTASHARING_MUTABLE"
 
+   .. py:attribute:: TABLE_DELTASHARING_OPEN_DIR_BASED
+      :value: "TABLE_DELTASHARING_OPEN_DIR_BASED"
+
    .. py:attribute:: TABLE_DELTA_EXTERNAL
       :value: "TABLE_DELTA_EXTERNAL"
 
diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst
index a1687d876..e2863310a 100644
--- a/docs/dbdataclasses/settings.rst
+++ b/docs/dbdataclasses/settings.rst
@@ -653,6 +653,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: NccPrivateEndpointRulePrivateLinkConnectionState
 
+   .. py:attribute:: CREATE_FAILED
+      :value: "CREATE_FAILED"
+
+   .. py:attribute:: CREATING
+      :value: "CREATING"
+
    .. py:attribute:: DISCONNECTED
       :value: "DISCONNECTED"
 
diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst
index d91417852..17297d8dd 100644
--- a/docs/workspace/catalog/catalogs.rst
+++ b/docs/workspace/catalog/catalogs.rst
@@ -24,10 +24,10 @@
 
             w = WorkspaceClient()
 
-            new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+            created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
             # cleanup
-            w.catalogs.delete(name=new_catalog.name, force=True)
+            w.catalogs.delete(name=created_catalog.name, force=True)
 
         Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
         **CREATE_CATALOG** privilege.
@@ -155,13 +155,12 @@
             import time
 
             from databricks.sdk import WorkspaceClient
-            from databricks.sdk.service import catalog
 
             w = WorkspaceClient()
 
             created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-            _ = w.catalogs.update(name=created.name, isolation_mode=catalog.CatalogIsolationMode.ISOLATED)
+            _ = w.catalogs.update(name=created.name, comment="updated")
 
             # cleanup
             w.catalogs.delete(name=created.name, force=True)
diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst
index 0d7447b2c..acaac095d 100644
--- a/docs/workspace/catalog/external_locations.rst
+++ b/docs/workspace/catalog/external_locations.rst
@@ -30,22 +30,20 @@
 
             w = WorkspaceClient()
 
-            storage_credential = w.storage_credentials.create(
+            credential = w.storage_credentials.create(
                 name=f"sdk-{time.time_ns()}",
                 aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
-                comment="created via SDK",
             )
 
-            external_location = w.external_locations.create(
+            created = w.external_locations.create(
                 name=f"sdk-{time.time_ns()}",
-                credential_name=storage_credential.name,
-                comment="created via SDK",
-                url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
+                credential_name=credential.name,
+                url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
             )
 
             # cleanup
-            w.storage_credentials.delete(name=storage_credential.name)
-            w.external_locations.delete(name=external_location.name)
+            w.storage_credentials.delete(name=credential.name)
+            w.external_locations.delete(name=created.name)
 
         Creates a new external location entry in the metastore. The caller must be a metastore admin or have the
         **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
@@ -142,10 +140,11 @@
        .. code-block::
 
            from databricks.sdk import WorkspaceClient
+           from databricks.sdk.service import catalog
 
            w = WorkspaceClient()
 
-           all = w.external_locations.list()
+           all = w.external_locations.list(catalog.ListExternalLocationsRequest())
 
        Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller
        must be a metastore admin, the owner of the external location, or a user that has some privilege on
diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst
index fd1479c78..719d5a156 100644
--- a/docs/workspace/catalog/schemas.rst
+++ b/docs/workspace/catalog/schemas.rst
@@ -22,13 +22,13 @@
 
             w = WorkspaceClient()
 
-            new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+            created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-            created = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=new_catalog.name)
+            created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
             # cleanup
-            w.catalogs.delete(name=new_catalog.name, force=True)
-            w.schemas.delete(full_name=created.full_name)
+            w.catalogs.delete(name=created_catalog.name, force=True)
+            w.schemas.delete(full_name=created_schema.full_name)
 
         Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or have the
         **CREATE_SCHEMA** privilege in the parent catalog.
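
Since `SecurableKind` members map one-to-one onto their wire strings, the new value needs no special handling on the client side. A minimal sketch, using only names that appear in the generated code above:

    from databricks.sdk.service import catalog

    # Enum members deserialize from their wire value, so responses that
    # carry the new kind resolve to the new member automatically.
    kind = catalog.SecurableKind("TABLE_DELTASHARING_OPEN_DIR_BASED")
    assert kind is catalog.SecurableKind.TABLE_DELTASHARING_OPEN_DIR_BASED
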
diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst
index c16a8e093..2134ea9c4 100644
--- a/docs/workspace/catalog/storage_credentials.rst
+++ b/docs/workspace/catalog/storage_credentials.rst
@@ -30,14 +30,13 @@
 
             w = WorkspaceClient()
 
-            storage_credential = w.storage_credentials.create(
+            credential = w.storage_credentials.create(
                 name=f"sdk-{time.time_ns()}",
                 aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
-                comment="created via SDK",
             )
 
             # cleanup
-            w.storage_credentials.delete(name=storage_credential.name)
+            w.storage_credentials.delete(name=credential.name)
 
         Creates a new storage credential.
@@ -99,13 +98,13 @@
 
             created = w.storage_credentials.create(
                 name=f"sdk-{time.time_ns()}",
-                aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+                aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
             )
 
-            by_name = w.storage_credentials.get(get=created.name)
+            by_name = w.storage_credentials.get(name=created.name)
 
             # cleanup
-            w.storage_credentials.delete(delete=created.name)
+            w.storage_credentials.delete(name=created.name)
 
         Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
         storage credential, or have some permission on the storage credential.
diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst
index 089b1b7f1..009b4bbd2 100644
--- a/docs/workspace/catalog/tables.rst
+++ b/docs/workspace/catalog/tables.rst
@@ -156,7 +156,7 @@
 
             created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
-            all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name)
+            summaries = w.tables.list_summaries(catalog_name=created_catalog.name, schema_name_pattern=created_schema.name)
 
             # cleanup
             w.schemas.delete(full_name=created_schema.full_name)
diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst
index 8619a5e9a..463e34d0a 100644
--- a/docs/workspace/compute/clusters.rst
+++ b/docs/workspace/compute/clusters.rst
@@ -645,11 +645,10 @@
        .. code-block::
 
            from databricks.sdk import WorkspaceClient
-           from databricks.sdk.service import compute
 
            w = WorkspaceClient()
 
-           all = w.clusters.list(compute.ListClustersRequest())
+           nodes = w.clusters.list_node_types()
 
        Return information about all pinned and active clusters, and all clusters terminated within the last 30
        days. Clusters terminated prior to this period are not included.
diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst
index ea24afd1a..15524c53e 100644
--- a/docs/workspace/iam/permissions.rst
+++ b/docs/workspace/iam/permissions.rst
@@ -44,7 +44,7 @@
 
             obj = w.workspace.get_status(path=notebook_path)
 
-            _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
+            levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
 
         Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
         object.
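
The `CREATING` and `CREATE_FAILED` states added to `NccPrivateEndpointRulePrivateLinkConnectionState` make private endpoint creation observable while it is still in flight. A minimal polling predicate, assuming nothing beyond the enum members visible in this diff (how the state is fetched from a rule object is deliberately left out):

    from databricks.sdk.service import settings

    State = settings.NccPrivateEndpointRulePrivateLinkConnectionState

    def creation_finished(state: State) -> bool:
        # CREATING is the only transient state here; CREATE_FAILED,
        # ESTABLISHED, DISCONNECTED and EXPIRED are all settled.
        return state is not State.CREATING

    assert not creation_finished(State.CREATING)
    assert creation_finished(State.CREATE_FAILED)
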
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst
index 601ffd87d..9a6c8f286 100644
--- a/docs/workspace/ml/model_registry.rst
+++ b/docs/workspace/ml/model_registry.rst
@@ -91,8 +91,6 @@
             w = WorkspaceClient()
 
             model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
-
-            created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
 
         Creates a new registered model with the name specified in the request body. Throws
         `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
@@ -736,14 +734,13 @@
 
             w = WorkspaceClient()
 
-            model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
+            created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
-            created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
+            model = w.model_registry.get_model(name=created.registered_model.name)
 
-            w.model_registry.update_model_version(
+            w.model_registry.update_model(
+                name=model.registered_model_databricks.name,
                 description=f"sdk-{time.time_ns()}",
-                name=created.model_version.name,
-                version=created.model_version.version,
             )
 
         Updates a registered model.
diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst
index f0081b3f2..0dfb63fbf 100644
--- a/docs/workspace/sql/queries.rst
+++ b/docs/workspace/sql/queries.rst
@@ -29,7 +29,7 @@
                     display_name=f"sdk-{time.time_ns()}",
                     warehouse_id=srcs[0].warehouse_id,
                     description="test query from Go SDK",
-                    query_text="SHOW TABLES",
+                    query_text="SELECT 1",
                 )
             )
diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst
index 63643d696..521f3b97f 100644
--- a/docs/workspace/workspace/workspace.rst
+++ b/docs/workspace/workspace/workspace.rst
@@ -232,14 +232,16 @@
 
        .. code-block::
 
+           import os
+           import time
+
           from databricks.sdk import WorkspaceClient
 
           w = WorkspaceClient()
 
-          names = []
-          for i in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True):
-              names.append(i.path)
-          assert len(names) > 0
+          notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+
+          objects = w.workspace.list(path=os.path.dirname(notebook))
 
        List workspace objects
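
The only breaking change in this diff is the removal of `access_modes` and `storage_location` from `sharing.Table`; as the generated `from_dict` above shows, those keys are now silently dropped during deserialization. A minimal migration sketch with a made-up payload, for callers that still need the raw values:

    from databricks.sdk.service import sharing

    raw = {
        "name": "trips",
        "share": "my_share",
        "storage_location": "s3://bucket/path",  # removed from the dataclass
    }

    table = sharing.Table.from_dict(raw)  # storage_location is ignored
    assert not hasattr(table, "storage_location")

    # Keep the raw response dict around if the removed fields are still needed.
    storage_location = raw.get("storage_location")
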