diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index a68714d08..9834ec958 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-3a7fe4deb693ca98d89b044116aaf008efd895a5
\ No newline at end of file
+3ae6f76120079424c8654263eafbc30ec0551854
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index e0c0733ba..c4d456976 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -203,6 +203,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsSer
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiToken.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMask.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMaskOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnRelationship.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionDependency.java linguist-generated=true
@@ -228,11 +229,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetas
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreatePolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRequestExternalLineage.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialDependency.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java linguist-generated=true
@@ -261,6 +264,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetas
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMonitorResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeletePolicyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeletePolicyResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRequestExternalLineage.java linguist-generated=true
@@ -310,6 +315,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMet
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionArgument.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependency.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoParameterStyle.java linguist-generated=true
@@ -325,6 +331,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsIm
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryPathCredentialRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryPathCredentialResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java linguist-generated=true
@@ -350,6 +358,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastor
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPermissionsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaResponse.java linguist-generated=true
@@ -387,6 +396,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetasto
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListPoliciesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListPoliciesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java linguist-generated=true
@@ -404,6 +415,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesR
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesResponseContent.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchColumn.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java linguist-generated=true
@@ -445,8 +457,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OptionSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OptionSpecOauthStage.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OptionSpecOptionType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PathOperation.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgress.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PolicyInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PolicyType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java linguist-generated=true
@@ -469,6 +487,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredM
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RowFilterOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java linguist-generated=true
@@ -506,6 +525,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValue.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java linguist-generated=true
@@ -525,6 +547,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModel
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissionsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRequestExternalLineage.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java linguist-generated=true
@@ -943,10 +966,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabas
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRoleRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningPhase.java linguist-generated=true
@@ -964,7 +991,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTabl
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java linguist-generated=true
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 20502a976..8232a368b 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -16,3 +16,14 @@
### Internal Changes
### API Changes
+* Added `workspaceClient.policies()` service and `workspaceClient.temporaryPathCredentials()` service.
+* Added `create()` method for `workspaceClient.tables()` service.
+* Added `listDatabaseCatalogs()`, `listSyncedDatabaseTables()`, `updateDatabaseCatalog()` and `updateSyncedDatabaseTable()` methods for `workspaceClient.database()` service.
+* Added `firstOnDemand` field for `com.databricks.sdk.service.compute.GcpAttributes`.
+* Added `usagePolicyId` field for `com.databricks.sdk.service.jobs.CreateJob`.
+* Added `usagePolicyId` field for `com.databricks.sdk.service.jobs.JobSettings`.
+* Added `usagePolicyId` field for `com.databricks.sdk.service.jobs.SubmitRun`.
+* Added `clientRequestId` and `usageContext` fields for `com.databricks.sdk.service.serving.QueryEndpointInput`.
+* Added `channelId`, `channelIdSet`, `oauthToken` and `oauthTokenSet` fields for `com.databricks.sdk.service.settings.SlackConfig`.
+* Added `SNAPSHOT` enum value for `com.databricks.sdk.service.ml.PublishSpecPublishMode`.
+* [Breaking] Changed `publishMode` field for `com.databricks.sdk.service.ml.PublishSpec` to be required.
\ No newline at end of file
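
As a quick orientation for the changelog entries above, here is a minimal sketch of reaching the new ABAC policies service from a WorkspaceClient. The listPolicies method name and its paginator-style Iterable return are assumed from the SDK's usual generated conventions; the securable names are hypothetical.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.ListPoliciesRequest;
import com.databricks.sdk.service.catalog.PolicyInfo;

public class PoliciesTour {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // resolves auth from the environment

    // New ABAC policies service: list policies on a securable, including inherited ones.
    Iterable<PolicyInfo> policies =
        w.policies()
            .listPolicies( // method name assumed from the generated-API conventions
                new ListPoliciesRequest()
                    .setOnSecurableType("table") // assumed securable-type string
                    .setOnSecurableFullname("main.sales.orders") // hypothetical table
                    .setIncludeInherited(true));
    for (PolicyInfo p : policies) {
      System.out.println(p);
    }
  }
}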
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index 2c214e723..51628d922 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -36,6 +36,8 @@
import com.databricks.sdk.service.catalog.ModelVersionsService;
import com.databricks.sdk.service.catalog.OnlineTablesAPI;
import com.databricks.sdk.service.catalog.OnlineTablesService;
+import com.databricks.sdk.service.catalog.PoliciesAPI;
+import com.databricks.sdk.service.catalog.PoliciesService;
import com.databricks.sdk.service.catalog.QualityMonitorsAPI;
import com.databricks.sdk.service.catalog.QualityMonitorsService;
import com.databricks.sdk.service.catalog.RegisteredModelsAPI;
@@ -52,6 +54,8 @@
import com.databricks.sdk.service.catalog.TableConstraintsService;
import com.databricks.sdk.service.catalog.TablesAPI;
import com.databricks.sdk.service.catalog.TablesService;
+import com.databricks.sdk.service.catalog.TemporaryPathCredentialsAPI;
+import com.databricks.sdk.service.catalog.TemporaryPathCredentialsService;
import com.databricks.sdk.service.catalog.TemporaryTableCredentialsAPI;
import com.databricks.sdk.service.catalog.TemporaryTableCredentialsService;
import com.databricks.sdk.service.catalog.VolumesAPI;
@@ -294,6 +298,7 @@ public class WorkspaceClient {
private PermissionMigrationAPI permissionMigrationAPI;
private PermissionsAPI permissionsAPI;
private PipelinesAPI pipelinesAPI;
+ private PoliciesAPI policiesAPI;
private PolicyComplianceForClustersAPI policyComplianceForClustersAPI;
private PolicyComplianceForJobsAPI policyComplianceForJobsAPI;
private PolicyFamiliesAPI policyFamiliesAPI;
@@ -332,6 +337,7 @@ public class WorkspaceClient {
private SystemSchemasAPI systemSchemasAPI;
private TableConstraintsAPI tableConstraintsAPI;
private TablesAPI tablesAPI;
+ private TemporaryPathCredentialsAPI temporaryPathCredentialsAPI;
private TemporaryTableCredentialsAPI temporaryTableCredentialsAPI;
private TokenManagementAPI tokenManagementAPI;
private TokensAPI tokensAPI;
@@ -412,6 +418,7 @@ public WorkspaceClient(DatabricksConfig config) {
permissionMigrationAPI = new PermissionMigrationAPI(apiClient);
permissionsAPI = new PermissionsAPI(apiClient);
pipelinesAPI = new PipelinesAPI(apiClient);
+ policiesAPI = new PoliciesAPI(apiClient);
policyComplianceForClustersAPI = new PolicyComplianceForClustersAPI(apiClient);
policyComplianceForJobsAPI = new PolicyComplianceForJobsAPI(apiClient);
policyFamiliesAPI = new PolicyFamiliesAPI(apiClient);
@@ -451,6 +458,7 @@ public WorkspaceClient(DatabricksConfig config) {
systemSchemasAPI = new SystemSchemasAPI(apiClient);
tableConstraintsAPI = new TableConstraintsAPI(apiClient);
tablesAPI = new TablesAPI(apiClient);
+ temporaryPathCredentialsAPI = new TemporaryPathCredentialsAPI(apiClient);
temporaryTableCredentialsAPI = new TemporaryTableCredentialsAPI(apiClient);
tokenManagementAPI = new TokenManagementAPI(apiClient);
tokensAPI = new TokensAPI(apiClient);
@@ -1213,6 +1221,18 @@ public PipelinesAPI pipelines() {
return pipelinesAPI;
}
+ /**
+   * Attribute-Based Access Control (ABAC) provides high-leverage governance for enforcing
+ * compliance policies in Unity Catalog. With ABAC policies, access is controlled in a
+ * hierarchical and scalable manner, based on data attributes rather than specific resources,
+ * enabling more flexible and comprehensive access control. ABAC policies in Unity Catalog support
+ * conditions on securable properties, governance tags, and environment contexts. Callers must
+ * have the `MANAGE` privilege on a securable to view, create, update, or delete ABAC policies.
+ */
+ public PoliciesAPI policies() {
+ return policiesAPI;
+ }
+
/**
* The policy compliance APIs allow you to view and manage the policy compliance status of
* clusters in your workspace.
@@ -1761,20 +1781,49 @@ public TablesAPI tables() {
return tablesAPI;
}
+ /**
+ * Temporary Path Credentials refer to short-lived, downscoped credentials used to access external
+ * cloud storage locations registered in Databricks. These credentials are employed to provide
+ * secure and time-limited access to data in cloud environments such as AWS, Azure, and Google
+ * Cloud. Each cloud provider has its own type of credentials: AWS uses temporary session tokens
+ * via AWS Security Token Service (STS), Azure utilizes Shared Access Signatures (SAS) for its
+ * data storage services, and Google Cloud supports temporary credentials through OAuth 2.0.
+ *
+   * <p>Temporary path credentials ensure that data access is limited in scope and duration,
+ * reducing the risk of unauthorized access or misuse. To use the temporary path credentials API,
+ * a metastore admin needs to enable the external_access_enabled flag (off by default) at the
+   * metastore level. A user needs to be granted the EXTERNAL USE LOCATION permission by the
+   * external location owner. For requests on existing external tables, the user also needs to be
+   * granted the EXTERNAL USE SCHEMA permission at the schema level by the catalog admin.
+ *
+   * <p>Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by
+ * catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the
+ * schema for security reasons. Similarly, EXTERNAL USE LOCATION is an external location level
+ * permission that can only be granted by external location owner explicitly and is not included
+ * in external location ownership or ALL PRIVILEGES on the external location for security reasons.
+ *
+   * <p>This API only supports temporary path credentials for external locations and external
+   * tables; support for volumes will be added in the future.
+ */
+ public TemporaryPathCredentialsAPI temporaryPathCredentials() {
+ return temporaryPathCredentialsAPI;
+ }
+
/**
* Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud
- * storage locationswhere table data is stored in Databricks. These credentials are employed to
- * provide secure and time-limitedaccess to data in cloud environments such as AWS, Azure, and
- * Google Cloud. Each cloud provider has its own typeof credentials: AWS uses temporary session
- * tokens via AWS Security Token Service (STS), Azure utilizesShared Access Signatures (SAS) for
- * its data storage services, and Google Cloud supports temporary credentialsthrough OAuth
- * 2.0.Temporary table credentials ensure that data access is limited in scope and duration,
- * reducing the risk ofunauthorized access or misuse. To use the temporary table credentials API,
+ * storage locations where table data is stored in Databricks. These credentials are employed to
+ * provide secure and time-limited access to data in cloud environments such as AWS, Azure, and
+ * Google Cloud. Each cloud provider has its own type of credentials: AWS uses temporary session
+ * tokens via AWS Security Token Service (STS), Azure utilizes Shared Access Signatures (SAS) for
+ * its data storage services, and Google Cloud supports temporary credentials through OAuth 2.0.
+ *
+   * <p>Temporary table credentials ensure that data access is limited in scope and duration,
+ * reducing the risk of unauthorized access or misuse. To use the temporary table credentials API,
* a metastore admin needs to enable the external_access_enabled flag (off by default) at the
* metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema
* level by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can
* only be granted by catalog admin explicitly and is not included in schema ownership or ALL
- * PRIVILEGES on the schema for security reason.
+ * PRIVILEGES on the schema for security reasons.
*/
public TemporaryTableCredentialsAPI temporaryTableCredentials() {
return temporaryTableCredentialsAPI;
@@ -2582,6 +2631,17 @@ public WorkspaceClient withPipelinesAPI(PipelinesAPI pipelines) {
return this;
}
+ /** Replace the default PoliciesService with a custom implementation. */
+ public WorkspaceClient withPoliciesImpl(PoliciesService policies) {
+ return this.withPoliciesAPI(new PoliciesAPI(policies));
+ }
+
+ /** Replace the default PoliciesAPI with a custom implementation. */
+ public WorkspaceClient withPoliciesAPI(PoliciesAPI policies) {
+ this.policiesAPI = policies;
+ return this;
+ }
+
/** Replace the default PolicyComplianceForClustersService with a custom implementation. */
public WorkspaceClient withPolicyComplianceForClustersImpl(
PolicyComplianceForClustersService policyComplianceForClusters) {
@@ -3031,6 +3091,20 @@ public WorkspaceClient withTablesAPI(TablesAPI tables) {
return this;
}
+ /** Replace the default TemporaryPathCredentialsService with a custom implementation. */
+ public WorkspaceClient withTemporaryPathCredentialsImpl(
+ TemporaryPathCredentialsService temporaryPathCredentials) {
+ return this.withTemporaryPathCredentialsAPI(
+ new TemporaryPathCredentialsAPI(temporaryPathCredentials));
+ }
+
+ /** Replace the default TemporaryPathCredentialsAPI with a custom implementation. */
+ public WorkspaceClient withTemporaryPathCredentialsAPI(
+ TemporaryPathCredentialsAPI temporaryPathCredentials) {
+ this.temporaryPathCredentialsAPI = temporaryPathCredentials;
+ return this;
+ }
+
/** Replace the default TemporaryTableCredentialsService with a custom implementation. */
public WorkspaceClient withTemporaryTableCredentialsImpl(
TemporaryTableCredentialsService temporaryTableCredentials) {
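
To make the prerequisites in the temporaryPathCredentials() Javadoc above concrete, here is a minimal sketch of requesting a read-only path credential. The generateTemporaryPathCredentials method name is assumed by analogy with generateTemporaryTableCredentials on the table-credentials service, and the URL is hypothetical; the call requires external_access_enabled at the metastore and EXTERNAL USE LOCATION, as described.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.GenerateTemporaryPathCredentialRequest;
import com.databricks.sdk.service.catalog.GenerateTemporaryPathCredentialResponse;
import com.databricks.sdk.service.catalog.PathOperation;

public class PathCredentialExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // resolves auth from the environment
    GenerateTemporaryPathCredentialResponse cred =
        w.temporaryPathCredentials()
            .generateTemporaryPathCredentials( // name assumed from the table-credentials analog
                new GenerateTemporaryPathCredentialRequest()
                    .setUrl("s3://my-bucket/landing/") // hypothetical external location path
                    .setOperation(PathOperation.PATH_READ));
    // The response advises caching the credential until its server-side expiration.
    System.out.println("expires at (epoch ms): " + cred.getExpirationTime());
  }
}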
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
index 1cab911c6..97b6f3b19 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
@@ -21,10 +21,7 @@ public class App {
@JsonProperty("app_status")
private ApplicationStatus appStatus;
- /**
- * TODO: Deprecate this field after serverless entitlements are released to all prod stages and
- * the new usage_policy_id is properly populated and used.
- */
+ /** */
@JsonProperty("budget_policy_id")
private String budgetPolicyId;
@@ -51,10 +48,7 @@ public class App {
@JsonProperty("description")
private String description;
- /**
- * TODO: Deprecate this field after serverless entitlements are released to all prod stages and
- * the new usage_policy_id is properly populated and used.
- */
+ /** */
@JsonProperty("effective_budget_policy_id")
private String effectiveBudgetPolicyId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMaskOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMaskOptions.java
new file mode 100755
index 000000000..e3e942aa9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMaskOptions.java
@@ -0,0 +1,87 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ColumnMaskOptions {
+ /**
+ * The fully qualified name of the column mask function. The function is called on each row of the
+ * target table. The function's first argument and its return type should match the type of the
+ * masked column. Required on create and update.
+ */
+ @JsonProperty("function_name")
+ private String functionName;
+
+ /**
+   * The alias of the column to be masked. The alias must refer to one of the matched columns. The
+   * value of the column is passed to the column mask function as the first argument. Required on
+ * create and update.
+ */
+ @JsonProperty("on_column")
+ private String onColumn;
+
+ /**
+ * Optional list of column aliases or constant literals to be passed as additional arguments to
+ * the column mask function. The type of each column should match the positional argument of the
+ * column mask function.
+ */
+ @JsonProperty("using")
+  private Collection<FunctionArgument> using;
+
+ public ColumnMaskOptions setFunctionName(String functionName) {
+ this.functionName = functionName;
+ return this;
+ }
+
+ public String getFunctionName() {
+ return functionName;
+ }
+
+ public ColumnMaskOptions setOnColumn(String onColumn) {
+ this.onColumn = onColumn;
+ return this;
+ }
+
+ public String getOnColumn() {
+ return onColumn;
+ }
+
+  public ColumnMaskOptions setUsing(Collection<FunctionArgument> using) {
+ this.using = using;
+ return this;
+ }
+
+  public Collection<FunctionArgument> getUsing() {
+ return using;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ColumnMaskOptions that = (ColumnMaskOptions) o;
+ return Objects.equals(functionName, that.functionName)
+ && Objects.equals(onColumn, that.onColumn)
+ && Objects.equals(using, that.using);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(functionName, onColumn, using);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ColumnMaskOptions.class)
+ .add("functionName", functionName)
+ .add("onColumn", onColumn)
+ .add("using", using)
+ .toString();
+ }
+}
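
A minimal sketch of assembling these options, with hypothetical function and alias names, showing how the using list mixes matched-column aliases and constant literals:

import com.databricks.sdk.service.catalog.ColumnMaskOptions;
import com.databricks.sdk.service.catalog.FunctionArgument;
import java.util.Arrays;

public class ColumnMaskExample {
  public static void main(String[] args) {
    // Mask the column matched under the alias "ssn"; the UDF's first parameter and
    // return type must match the masked column's type. Extra positional arguments
    // are passed via `using` as matched-column aliases or constant literals.
    ColumnMaskOptions mask =
        new ColumnMaskOptions()
            .setFunctionName("main.governance.mask_ssn") // hypothetical mask UDF
            .setOnColumn("ssn")
            .setUsing(
                Arrays.asList(
                    new FunctionArgument().setAlias("region"), // matched-column alias
                    new FunctionArgument().setConstant("XXX-XX"))); // constant literal
    System.out.println(mask);
  }
}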
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreatePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreatePolicyRequest.java
new file mode 100755
index 000000000..7d89c77ba
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreatePolicyRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreatePolicyRequest {
+ /** Required. The policy to create. */
+ @JsonProperty("policy_info")
+ private PolicyInfo policyInfo;
+
+ public CreatePolicyRequest setPolicyInfo(PolicyInfo policyInfo) {
+ this.policyInfo = policyInfo;
+ return this;
+ }
+
+ public PolicyInfo getPolicyInfo() {
+ return policyInfo;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreatePolicyRequest that = (CreatePolicyRequest) o;
+ return Objects.equals(policyInfo, that.policyInfo);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(policyInfo);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreatePolicyRequest.class).add("policyInfo", policyInfo).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableRequest.java
new file mode 100755
index 000000000..efd4bdcf9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableRequest.java
@@ -0,0 +1,159 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Objects;
+
+@Generated
+public class CreateTableRequest {
+ /** Name of parent catalog. */
+ @JsonProperty("catalog_name")
+ private String catalogName;
+
+ /** The array of __ColumnInfo__ definitions of the table's columns. */
+ @JsonProperty("columns")
+  private Collection<ColumnInfo> columns;
+
+ /** */
+ @JsonProperty("data_source_format")
+ private DataSourceFormat dataSourceFormat;
+
+ /** Name of table, relative to parent schema. */
+ @JsonProperty("name")
+ private String name;
+
+ /** A map of key-value properties attached to the securable. */
+ @JsonProperty("properties")
+  private Map<String, String> properties;
+
+ /** Name of parent schema relative to its parent catalog. */
+ @JsonProperty("schema_name")
+ private String schemaName;
+
+ /** Storage root URL for table (for **MANAGED**, **EXTERNAL** tables). */
+ @JsonProperty("storage_location")
+ private String storageLocation;
+
+ /** */
+ @JsonProperty("table_type")
+ private TableType tableType;
+
+ public CreateTableRequest setCatalogName(String catalogName) {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ public String getCatalogName() {
+ return catalogName;
+ }
+
+  public CreateTableRequest setColumns(Collection<ColumnInfo> columns) {
+ this.columns = columns;
+ return this;
+ }
+
+  public Collection<ColumnInfo> getColumns() {
+ return columns;
+ }
+
+ public CreateTableRequest setDataSourceFormat(DataSourceFormat dataSourceFormat) {
+ this.dataSourceFormat = dataSourceFormat;
+ return this;
+ }
+
+ public DataSourceFormat getDataSourceFormat() {
+ return dataSourceFormat;
+ }
+
+ public CreateTableRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+  public CreateTableRequest setProperties(Map<String, String> properties) {
+ this.properties = properties;
+ return this;
+ }
+
+  public Map<String, String> getProperties() {
+ return properties;
+ }
+
+ public CreateTableRequest setSchemaName(String schemaName) {
+ this.schemaName = schemaName;
+ return this;
+ }
+
+ public String getSchemaName() {
+ return schemaName;
+ }
+
+ public CreateTableRequest setStorageLocation(String storageLocation) {
+ this.storageLocation = storageLocation;
+ return this;
+ }
+
+ public String getStorageLocation() {
+ return storageLocation;
+ }
+
+ public CreateTableRequest setTableType(TableType tableType) {
+ this.tableType = tableType;
+ return this;
+ }
+
+ public TableType getTableType() {
+ return tableType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateTableRequest that = (CreateTableRequest) o;
+ return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(columns, that.columns)
+ && Objects.equals(dataSourceFormat, that.dataSourceFormat)
+ && Objects.equals(name, that.name)
+ && Objects.equals(properties, that.properties)
+ && Objects.equals(schemaName, that.schemaName)
+ && Objects.equals(storageLocation, that.storageLocation)
+ && Objects.equals(tableType, that.tableType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ catalogName,
+ columns,
+ dataSourceFormat,
+ name,
+ properties,
+ schemaName,
+ storageLocation,
+ tableType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateTableRequest.class)
+ .add("catalogName", catalogName)
+ .add("columns", columns)
+ .add("dataSourceFormat", dataSourceFormat)
+ .add("name", name)
+ .add("properties", properties)
+ .add("schemaName", schemaName)
+ .add("storageLocation", storageLocation)
+ .add("tableType", tableType)
+ .toString();
+ }
+}
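
The changelog's new create() on the tables() service takes this request. A sketch registering an external Delta table follows; the catalog/schema/table names and storage root are hypothetical, and the ColumnInfo setters are assumed from the existing generated class.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.ColumnInfo;
import com.databricks.sdk.service.catalog.ColumnTypeName;
import com.databricks.sdk.service.catalog.CreateTableRequest;
import com.databricks.sdk.service.catalog.DataSourceFormat;
import com.databricks.sdk.service.catalog.TableType;
import java.util.Arrays;

public class CreateExternalTableExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Register an external Delta table at a pre-existing storage root.
    w.tables()
        .create(
            new CreateTableRequest()
                .setCatalogName("main")
                .setSchemaName("sales")
                .setName("orders_ext")
                .setTableType(TableType.EXTERNAL)
                .setDataSourceFormat(DataSourceFormat.DELTA)
                .setStorageLocation("s3://my-bucket/orders/") // hypothetical storage root
                .setColumns(
                    Arrays.asList(
                        new ColumnInfo()
                            .setName("order_id")
                            .setTypeName(ColumnTypeName.LONG)
                            .setTypeText("bigint")
                            .setPosition(0L))));
  }
}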
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeletePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeletePolicyRequest.java
new file mode 100755
index 000000000..39937a005
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeletePolicyRequest.java
@@ -0,0 +1,71 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeletePolicyRequest {
+  /** Required. The name of the policy to delete. */
+ @JsonIgnore private String name;
+
+ /** Required. The fully qualified name of the securable to delete the policy from. */
+ @JsonIgnore private String onSecurableFullname;
+
+ /** Required. The type of the securable to delete the policy from. */
+ @JsonIgnore private String onSecurableType;
+
+ public DeletePolicyRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public DeletePolicyRequest setOnSecurableFullname(String onSecurableFullname) {
+ this.onSecurableFullname = onSecurableFullname;
+ return this;
+ }
+
+ public String getOnSecurableFullname() {
+ return onSecurableFullname;
+ }
+
+ public DeletePolicyRequest setOnSecurableType(String onSecurableType) {
+ this.onSecurableType = onSecurableType;
+ return this;
+ }
+
+ public String getOnSecurableType() {
+ return onSecurableType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeletePolicyRequest that = (DeletePolicyRequest) o;
+ return Objects.equals(name, that.name)
+ && Objects.equals(onSecurableFullname, that.onSecurableFullname)
+ && Objects.equals(onSecurableType, that.onSecurableType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, onSecurableFullname, onSecurableType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeletePolicyRequest.class)
+ .add("name", name)
+ .add("onSecurableFullname", onSecurableFullname)
+ .add("onSecurableType", onSecurableType)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeletePolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeletePolicyResponse.java
new file mode 100755
index 000000000..09348b924
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeletePolicyResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class DeletePolicyResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeletePolicyResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionArgument.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionArgument.java
new file mode 100755
index 000000000..665e0ee05
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionArgument.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class FunctionArgument {
+ /** The alias of a matched column. */
+ @JsonProperty("alias")
+ private String alias;
+
+ /** A constant literal. */
+ @JsonProperty("constant")
+ private String constant;
+
+ public FunctionArgument setAlias(String alias) {
+ this.alias = alias;
+ return this;
+ }
+
+ public String getAlias() {
+ return alias;
+ }
+
+ public FunctionArgument setConstant(String constant) {
+ this.constant = constant;
+ return this;
+ }
+
+ public String getConstant() {
+ return constant;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ FunctionArgument that = (FunctionArgument) o;
+ return Objects.equals(alias, that.alias) && Objects.equals(constant, that.constant);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(alias, constant);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(FunctionArgument.class)
+ .add("alias", alias)
+ .add("constant", constant)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryPathCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryPathCredentialRequest.java
new file mode 100755
index 000000000..ba1a2c00f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryPathCredentialRequest.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenerateTemporaryPathCredentialRequest {
+ /**
+ * Optional. When set to true, the service will not validate that the generated credentials can
+   * perform write operations; therefore, no new paths will be created and the response will not
+ * contain valid credentials. Defaults to false.
+ */
+ @JsonProperty("dry_run")
+ private Boolean dryRun;
+
+ /** The operation being performed on the path. */
+ @JsonProperty("operation")
+ private PathOperation operation;
+
+ /** URL for path-based access. */
+ @JsonProperty("url")
+ private String url;
+
+ public GenerateTemporaryPathCredentialRequest setDryRun(Boolean dryRun) {
+ this.dryRun = dryRun;
+ return this;
+ }
+
+ public Boolean getDryRun() {
+ return dryRun;
+ }
+
+ public GenerateTemporaryPathCredentialRequest setOperation(PathOperation operation) {
+ this.operation = operation;
+ return this;
+ }
+
+ public PathOperation getOperation() {
+ return operation;
+ }
+
+ public GenerateTemporaryPathCredentialRequest setUrl(String url) {
+ this.url = url;
+ return this;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenerateTemporaryPathCredentialRequest that = (GenerateTemporaryPathCredentialRequest) o;
+ return Objects.equals(dryRun, that.dryRun)
+ && Objects.equals(operation, that.operation)
+ && Objects.equals(url, that.url);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dryRun, operation, url);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenerateTemporaryPathCredentialRequest.class)
+ .add("dryRun", dryRun)
+ .add("operation", operation)
+ .add("url", url)
+ .toString();
+ }
+}
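
Given the dry_run semantics above, here is a short sketch of a dry-run request, which creates no paths and returns no usable credentials. The generateTemporaryPathCredentials method name is assumed, as in the earlier sketch, and the path is hypothetical.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.GenerateTemporaryPathCredentialRequest;
import com.databricks.sdk.service.catalog.PathOperation;

public class PathCredentialDryRun {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Dry run: skips write validation, creates no new paths, and the response
    // intentionally carries no valid credentials.
    w.temporaryPathCredentials()
        .generateTemporaryPathCredentials( // method name assumed
            new GenerateTemporaryPathCredentialRequest()
                .setUrl("s3://my-bucket/landing/") // hypothetical path
                .setOperation(PathOperation.PATH_READ_WRITE)
                .setDryRun(true));
  }
}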
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryPathCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryPathCredentialResponse.java
new file mode 100755
index 000000000..add0089ea
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryPathCredentialResponse.java
@@ -0,0 +1,147 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenerateTemporaryPathCredentialResponse {
+ /** */
+ @JsonProperty("aws_temp_credentials")
+ private AwsCredentials awsTempCredentials;
+
+ /** */
+ @JsonProperty("azure_aad")
+ private AzureActiveDirectoryToken azureAad;
+
+ /** */
+ @JsonProperty("azure_user_delegation_sas")
+ private AzureUserDelegationSas azureUserDelegationSas;
+
+ /**
+ * Server time when the credential will expire, in epoch milliseconds. The API client is advised
+ * to cache the credential given this expiration time.
+ */
+ @JsonProperty("expiration_time")
+ private Long expirationTime;
+
+ /** */
+ @JsonProperty("gcp_oauth_token")
+ private GcpOauthToken gcpOauthToken;
+
+ /** */
+ @JsonProperty("r2_temp_credentials")
+ private R2Credentials r2TempCredentials;
+
+ /** The URL of the storage path accessible by the temporary credential. */
+ @JsonProperty("url")
+ private String url;
+
+ public GenerateTemporaryPathCredentialResponse setAwsTempCredentials(
+ AwsCredentials awsTempCredentials) {
+ this.awsTempCredentials = awsTempCredentials;
+ return this;
+ }
+
+ public AwsCredentials getAwsTempCredentials() {
+ return awsTempCredentials;
+ }
+
+ public GenerateTemporaryPathCredentialResponse setAzureAad(AzureActiveDirectoryToken azureAad) {
+ this.azureAad = azureAad;
+ return this;
+ }
+
+ public AzureActiveDirectoryToken getAzureAad() {
+ return azureAad;
+ }
+
+ public GenerateTemporaryPathCredentialResponse setAzureUserDelegationSas(
+ AzureUserDelegationSas azureUserDelegationSas) {
+ this.azureUserDelegationSas = azureUserDelegationSas;
+ return this;
+ }
+
+ public AzureUserDelegationSas getAzureUserDelegationSas() {
+ return azureUserDelegationSas;
+ }
+
+ public GenerateTemporaryPathCredentialResponse setExpirationTime(Long expirationTime) {
+ this.expirationTime = expirationTime;
+ return this;
+ }
+
+ public Long getExpirationTime() {
+ return expirationTime;
+ }
+
+ public GenerateTemporaryPathCredentialResponse setGcpOauthToken(GcpOauthToken gcpOauthToken) {
+ this.gcpOauthToken = gcpOauthToken;
+ return this;
+ }
+
+ public GcpOauthToken getGcpOauthToken() {
+ return gcpOauthToken;
+ }
+
+ public GenerateTemporaryPathCredentialResponse setR2TempCredentials(
+ R2Credentials r2TempCredentials) {
+ this.r2TempCredentials = r2TempCredentials;
+ return this;
+ }
+
+ public R2Credentials getR2TempCredentials() {
+ return r2TempCredentials;
+ }
+
+ public GenerateTemporaryPathCredentialResponse setUrl(String url) {
+ this.url = url;
+ return this;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenerateTemporaryPathCredentialResponse that = (GenerateTemporaryPathCredentialResponse) o;
+ return Objects.equals(awsTempCredentials, that.awsTempCredentials)
+ && Objects.equals(azureAad, that.azureAad)
+ && Objects.equals(azureUserDelegationSas, that.azureUserDelegationSas)
+ && Objects.equals(expirationTime, that.expirationTime)
+ && Objects.equals(gcpOauthToken, that.gcpOauthToken)
+ && Objects.equals(r2TempCredentials, that.r2TempCredentials)
+ && Objects.equals(url, that.url);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ awsTempCredentials,
+ azureAad,
+ azureUserDelegationSas,
+ expirationTime,
+ gcpOauthToken,
+ r2TempCredentials,
+ url);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenerateTemporaryPathCredentialResponse.class)
+ .add("awsTempCredentials", awsTempCredentials)
+ .add("azureAad", azureAad)
+ .add("azureUserDelegationSas", azureUserDelegationSas)
+ .add("expirationTime", expirationTime)
+ .add("gcpOauthToken", gcpOauthToken)
+ .add("r2TempCredentials", r2TempCredentials)
+ .add("url", url)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPolicyRequest.java
new file mode 100755
index 000000000..242d5b851
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPolicyRequest.java
@@ -0,0 +1,71 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetPolicyRequest {
+ /** Required. The name of the policy to retrieve. */
+ @JsonIgnore private String name;
+
+  /** Required. The fully qualified name of the securable to retrieve the policy for. */
+ @JsonIgnore private String onSecurableFullname;
+
+ /** Required. The type of the securable to retrieve the policy for. */
+ @JsonIgnore private String onSecurableType;
+
+ public GetPolicyRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public GetPolicyRequest setOnSecurableFullname(String onSecurableFullname) {
+ this.onSecurableFullname = onSecurableFullname;
+ return this;
+ }
+
+ public String getOnSecurableFullname() {
+ return onSecurableFullname;
+ }
+
+ public GetPolicyRequest setOnSecurableType(String onSecurableType) {
+ this.onSecurableType = onSecurableType;
+ return this;
+ }
+
+ public String getOnSecurableType() {
+ return onSecurableType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetPolicyRequest that = (GetPolicyRequest) o;
+ return Objects.equals(name, that.name)
+ && Objects.equals(onSecurableFullname, that.onSecurableFullname)
+ && Objects.equals(onSecurableType, that.onSecurableType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, onSecurableFullname, onSecurableType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetPolicyRequest.class)
+ .add("name", name)
+ .add("onSecurableFullname", onSecurableFullname)
+ .add("onSecurableType", onSecurableType)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListPoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListPoliciesRequest.java
new file mode 100755
index 000000000..444155d59
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListPoliciesRequest.java
@@ -0,0 +1,115 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListPoliciesRequest {
+ /**
+ * Optional. Whether to include policies defined on parent securables. By default, the inherited
+ * policies are not included.
+ */
+ @JsonIgnore
+ @QueryParam("include_inherited")
+ private Boolean includeInherited;
+
+ /**
+ * Optional. Maximum number of policies to return on a single page (page length). - When not set
+ * or set to 0, the page length is set to a server configured value (recommended); - When set to a
+ * value greater than 0, the page length is the minimum of this value and a server configured
+ * value;
+ */
+ @JsonIgnore
+ @QueryParam("max_results")
+ private Long maxResults;
+
+  /** Required. The fully qualified name of the securable to list policies for. */
+ @JsonIgnore private String onSecurableFullname;
+
+ /** Required. The type of the securable to list policies for. */
+ @JsonIgnore private String onSecurableType;
+
+ /** Optional. Opaque pagination token to go to next page based on previous query. */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListPoliciesRequest setIncludeInherited(Boolean includeInherited) {
+ this.includeInherited = includeInherited;
+ return this;
+ }
+
+ public Boolean getIncludeInherited() {
+ return includeInherited;
+ }
+
+ public ListPoliciesRequest setMaxResults(Long maxResults) {
+ this.maxResults = maxResults;
+ return this;
+ }
+
+ public Long getMaxResults() {
+ return maxResults;
+ }
+
+ public ListPoliciesRequest setOnSecurableFullname(String onSecurableFullname) {
+ this.onSecurableFullname = onSecurableFullname;
+ return this;
+ }
+
+ public String getOnSecurableFullname() {
+ return onSecurableFullname;
+ }
+
+ public ListPoliciesRequest setOnSecurableType(String onSecurableType) {
+ this.onSecurableType = onSecurableType;
+ return this;
+ }
+
+ public String getOnSecurableType() {
+ return onSecurableType;
+ }
+
+ public ListPoliciesRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListPoliciesRequest that = (ListPoliciesRequest) o;
+ return Objects.equals(includeInherited, that.includeInherited)
+ && Objects.equals(maxResults, that.maxResults)
+ && Objects.equals(onSecurableFullname, that.onSecurableFullname)
+ && Objects.equals(onSecurableType, that.onSecurableType)
+ && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ includeInherited, maxResults, onSecurableFullname, onSecurableType, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListPoliciesRequest.class)
+ .add("includeInherited", includeInherited)
+ .add("maxResults", maxResults)
+ .add("onSecurableFullname", onSecurableFullname)
+ .add("onSecurableType", onSecurableType)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListPoliciesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListPoliciesResponse.java
new file mode 100755
index 000000000..7a2218723
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListPoliciesResponse.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListPoliciesResponse {
+ /**
+ * Optional opaque token for continuing pagination. `page_token` should be set to this value for
+ * the next request to retrieve the next page of results.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** The list of retrieved policies. */
+ @JsonProperty("policies")
+  private Collection<PolicyInfo> policies;
+
+ public ListPoliciesResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ public ListPoliciesResponse setPolicies(Collection<PolicyInfo> policies) {
+ this.policies = policies;
+ return this;
+ }
+
+ public Collection<PolicyInfo> getPolicies() {
+ return policies;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListPoliciesResponse that = (ListPoliciesResponse) o;
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(policies, that.policies);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, policies);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListPoliciesResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("policies", policies)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchColumn.java
new file mode 100755
index 000000000..6bf29d0ad
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchColumn.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class MatchColumn {
+ /** Optional alias of the matched column. */
+ @JsonProperty("alias")
+ private String alias;
+
+ /** The condition expression used to match a table column. */
+ @JsonProperty("condition")
+ private String condition;
+
+ public MatchColumn setAlias(String alias) {
+ this.alias = alias;
+ return this;
+ }
+
+ public String getAlias() {
+ return alias;
+ }
+
+ public MatchColumn setCondition(String condition) {
+ this.condition = condition;
+ return this;
+ }
+
+ public String getCondition() {
+ return condition;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ MatchColumn that = (MatchColumn) o;
+ return Objects.equals(alias, that.alias) && Objects.equals(condition, that.condition);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(alias, condition);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(MatchColumn.class)
+ .add("alias", alias)
+ .add("condition", condition)
+ .toString();
+ }
+}
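
A sketch of how a match condition might be expressed; the condition string is a hypothetical example, since this diff does not define the expression syntax:

    // Bind any column satisfying the condition to the alias "pii_col",
    // so policy options can refer to the matched column by that name.
    MatchColumn match =
        new MatchColumn()
            .setCondition("hasTagValue('pii', 'true')")
            .setAlias("pii_col");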
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PathOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PathOperation.java
new file mode 100755
index 000000000..0714fcb4a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PathOperation.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum PathOperation {
+ PATH_CREATE_TABLE,
+ PATH_READ,
+ PATH_READ_WRITE,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesAPI.java
new file mode 100755
index 000000000..0f3b06b02
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesAPI.java
@@ -0,0 +1,102 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Attribute-Based Access Control (ABAC) provides high leverage governance for enforcing compliance
+ * policies in Unity Catalog. With ABAC policies, access is controlled in a hierarchical and
+ * scalable manner, based on data attributes rather than specific resources, enabling more flexible
+ * and comprehensive access control. ABAC policies in Unity Catalog support conditions on securable
+ * properties, governance tags, and environment contexts. Callers must have the `MANAGE` privilege
+ * on a securable to view, create, update, or delete ABAC policies.
+ */
+@Generated
+public class PoliciesAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(PoliciesAPI.class);
+
+ private final PoliciesService impl;
+
+ /** Regular-use constructor */
+ public PoliciesAPI(ApiClient apiClient) {
+ impl = new PoliciesImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public PoliciesAPI(PoliciesService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Creates a new policy on a securable. The new policy applies to the securable and all its
+ * descendants.
+ */
+ public PolicyInfo createPolicy(CreatePolicyRequest request) {
+ return impl.createPolicy(request);
+ }
+
+ public DeletePolicyResponse deletePolicy(
+ String onSecurableType, String onSecurableFullname, String name) {
+ return deletePolicy(
+ new DeletePolicyRequest()
+ .setOnSecurableType(onSecurableType)
+ .setOnSecurableFullname(onSecurableFullname)
+ .setName(name));
+ }
+
+ /** Delete an ABAC policy defined on a securable. */
+ public DeletePolicyResponse deletePolicy(DeletePolicyRequest request) {
+ return impl.deletePolicy(request);
+ }
+
+ public PolicyInfo getPolicy(String onSecurableType, String onSecurableFullname, String name) {
+ return getPolicy(
+ new GetPolicyRequest()
+ .setOnSecurableType(onSecurableType)
+ .setOnSecurableFullname(onSecurableFullname)
+ .setName(name));
+ }
+
+ /** Get the policy definition on a securable */
+ public PolicyInfo getPolicy(GetPolicyRequest request) {
+ return impl.getPolicy(request);
+ }
+
+ public Iterable<PolicyInfo> listPolicies(String onSecurableType, String onSecurableFullname) {
+ return listPolicies(
+ new ListPoliciesRequest()
+ .setOnSecurableType(onSecurableType)
+ .setOnSecurableFullname(onSecurableFullname));
+ }
+
+ /**
+ * List all policies defined on a securable. Optionally, the list can include inherited policies
+ * defined on the securable's parent schema or catalog.
+ */
+ public Iterable<PolicyInfo> listPolicies(ListPoliciesRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listPolicies,
+ ListPoliciesResponse::getPolicies,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ /** Update an ABAC policy on a securable. */
+ public PolicyInfo updatePolicy(UpdatePolicyRequest request) {
+ return impl.updatePolicy(request);
+ }
+
+ public PoliciesService impl() {
+ return impl;
+ }
+}
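
A minimal usage sketch, assuming the workspace client surfaces this API through a `policies()` accessor (the accessor name is an assumption; the listPolicies overload and the Paginator behavior are shown above):

    import com.databricks.sdk.WorkspaceClient;

    WorkspaceClient w = new WorkspaceClient();
    // The Paginator follows next_page_token transparently, so this loop
    // walks every page returned by the service.
    for (PolicyInfo policy : w.policies().listPolicies("table", "main.sales.orders")) {
      System.out.println(policy.getName() + " -> " + policy.getPolicyType());
    }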
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesImpl.java
new file mode 100755
index 000000000..2d15757ff
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesImpl.java
@@ -0,0 +1,97 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of Policies */
+@Generated
+class PoliciesImpl implements PoliciesService {
+ private final ApiClient apiClient;
+
+ public PoliciesImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public PolicyInfo createPolicy(CreatePolicyRequest request) {
+ String path = "/api/2.1/unity-catalog/policies";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getPolicyInfo()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, PolicyInfo.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DeletePolicyResponse deletePolicy(DeletePolicyRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/unity-catalog/policies/%s/%s/%s",
+ request.getOnSecurableType(), request.getOnSecurableFullname(), request.getName());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, DeletePolicyResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public PolicyInfo getPolicy(GetPolicyRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/unity-catalog/policies/%s/%s/%s",
+ request.getOnSecurableType(), request.getOnSecurableFullname(), request.getName());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, PolicyInfo.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListPoliciesResponse listPolicies(ListPoliciesRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/unity-catalog/policies/%s/%s",
+ request.getOnSecurableType(), request.getOnSecurableFullname());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListPoliciesResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public PolicyInfo updatePolicy(UpdatePolicyRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/unity-catalog/policies/%s/%s/%s",
+ request.getOnSecurableType(), request.getOnSecurableFullname(), request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicyInfo()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, PolicyInfo.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesService.java
new file mode 100755
index 000000000..58f70b4f4
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PoliciesService.java
@@ -0,0 +1,40 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Attribute-Based Access Control (ABAC) provides high leverage governance for enforcing compliance
+ * policies in Unity Catalog. With ABAC policies, access is controlled in a hierarchical and
+ * scalable manner, based on data attributes rather than specific resources, enabling more flexible
+ * and comprehensive access control. ABAC policies in Unity Catalog support conditions on securable
+ * properties, governance tags, and environment contexts. Callers must have the `MANAGE` privilege
+ * on a securable to view, create, update, or delete ABAC policies.
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface PoliciesService {
+ /**
+ * Creates a new policy on a securable. The new policy applies to the securable and all its
+ * descendants.
+ */
+ PolicyInfo createPolicy(CreatePolicyRequest createPolicyRequest);
+
+ /** Delete an ABAC policy defined on a securable. */
+ DeletePolicyResponse deletePolicy(DeletePolicyRequest deletePolicyRequest);
+
+ /** Get the policy definition on a securable */
+ PolicyInfo getPolicy(GetPolicyRequest getPolicyRequest);
+
+ /**
+ * List all policies defined on a securable. Optionally, the list can include inherited policies
+ * defined on the securable's parent schema or catalog.
+ */
+ ListPoliciesResponse listPolicies(ListPoliciesRequest listPoliciesRequest);
+
+ /** Update an ABAC policy on a securable. */
+ PolicyInfo updatePolicy(UpdatePolicyRequest updatePolicyRequest);
+}
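
Because `PoliciesAPI` offers a mock constructor, this interface can be stubbed in unit tests without any HTTP calls; a sketch using Mockito (Mockito is an assumption here, any hand-written `PoliciesService` implementation works equally well):

    import static org.mockito.Mockito.*;

    PoliciesService service = mock(PoliciesService.class);
    when(service.getPolicy(any(GetPolicyRequest.class)))
        .thenReturn(new PolicyInfo().setName("mask_ssn"));

    PoliciesAPI api = new PoliciesAPI(service);
    // Resolves against the stub, not the Databricks backend.
    PolicyInfo info = api.getPolicy("table", "main.sales.orders", "mask_ssn");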
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PolicyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PolicyInfo.java
new file mode 100755
index 000000000..df6fd841e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PolicyInfo.java
@@ -0,0 +1,329 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class PolicyInfo {
+ /**
+ * Options for column mask policies. Valid only if `policy_type` is `POLICY_TYPE_COLUMN_MASK`.
+ * Required on create and optional on update. When specified on update, the new options will
+ * replace the existing options as a whole.
+ */
+ @JsonProperty("column_mask")
+ private ColumnMaskOptions columnMask;
+
+ /** Optional description of the policy. */
+ @JsonProperty("comment")
+ private String comment;
+
+ /** Time at which the policy was created, in epoch milliseconds. Output only. */
+ @JsonProperty("created_at")
+ private Long createdAt;
+
+ /** Username of the user who created the policy. Output only. */
+ @JsonProperty("created_by")
+ private String createdBy;
+
+ /** Optional list of user or group names that should be excluded from the policy. */
+ @JsonProperty("except_principals")
+ private Collection<String> exceptPrincipals;
+
+ /**
+ * Type of securables that the policy should take effect on. Only `table` is supported at this
+ * moment. Required on create and optional on update.
+ */
+ @JsonProperty("for_securable_type")
+ private SecurableType forSecurableType;
+
+ /** Unique identifier of the policy. This field is output only and is generated by the system. */
+ @JsonProperty("id")
+ private String id;
+
+ /**
+ * Optional list of condition expressions used to match table columns. Only valid when
+ * `for_securable_type` is `table`. When specified, the policy only applies to tables whose
+ * columns satisfy all match conditions.
+ */
+ @JsonProperty("match_columns")
+ private Collection<MatchColumn> matchColumns;
+
+ /**
+ * Name of the policy. Required on create and ignored on update. To update the name, use the
+ * `new_name` field.
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /**
+ * Full name of the securable on which the policy is defined. Required on create and ignored on
+ * update.
+ */
+ @JsonProperty("on_securable_fullname")
+ private String onSecurableFullname;
+
+ /**
+ * Type of the securable on which the policy is defined. Only `catalog`, `schema` and `table` are
+ * supported at this moment. Required on create and ignored on update.
+ */
+ @JsonProperty("on_securable_type")
+ private SecurableType onSecurableType;
+
+ /** Type of the policy. Required on create and ignored on update. */
+ @JsonProperty("policy_type")
+ private PolicyType policyType;
+
+ /**
+ * Options for row filter policies. Valid only if `policy_type` is `POLICY_TYPE_ROW_FILTER`.
+ * Required on create and optional on update. When specified on update, the new options will
+ * replace the existing options as a whole.
+ */
+ @JsonProperty("row_filter")
+ private RowFilterOptions rowFilter;
+
+ /**
+ * List of user or group names that the policy applies to. Required on create and optional on
+ * update.
+ */
+ @JsonProperty("to_principals")
+ private Collection<String> toPrincipals;
+
+ /** Time at which the policy was last modified, in epoch milliseconds. Output only. */
+ @JsonProperty("updated_at")
+ private Long updatedAt;
+
+ /** Username of the user who last modified the policy. Output only. */
+ @JsonProperty("updated_by")
+ private String updatedBy;
+
+ /** Optional condition when the policy should take effect. */
+ @JsonProperty("when_condition")
+ private String whenCondition;
+
+ public PolicyInfo setColumnMask(ColumnMaskOptions columnMask) {
+ this.columnMask = columnMask;
+ return this;
+ }
+
+ public ColumnMaskOptions getColumnMask() {
+ return columnMask;
+ }
+
+ public PolicyInfo setComment(String comment) {
+ this.comment = comment;
+ return this;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public PolicyInfo setCreatedAt(Long createdAt) {
+ this.createdAt = createdAt;
+ return this;
+ }
+
+ public Long getCreatedAt() {
+ return createdAt;
+ }
+
+ public PolicyInfo setCreatedBy(String createdBy) {
+ this.createdBy = createdBy;
+ return this;
+ }
+
+ public String getCreatedBy() {
+ return createdBy;
+ }
+
+ public PolicyInfo setExceptPrincipals(Collection<String> exceptPrincipals) {
+ this.exceptPrincipals = exceptPrincipals;
+ return this;
+ }
+
+ public Collection<String> getExceptPrincipals() {
+ return exceptPrincipals;
+ }
+
+ public PolicyInfo setForSecurableType(SecurableType forSecurableType) {
+ this.forSecurableType = forSecurableType;
+ return this;
+ }
+
+ public SecurableType getForSecurableType() {
+ return forSecurableType;
+ }
+
+ public PolicyInfo setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public PolicyInfo setMatchColumns(Collection<MatchColumn> matchColumns) {
+ this.matchColumns = matchColumns;
+ return this;
+ }
+
+ public Collection<MatchColumn> getMatchColumns() {
+ return matchColumns;
+ }
+
+ public PolicyInfo setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public PolicyInfo setOnSecurableFullname(String onSecurableFullname) {
+ this.onSecurableFullname = onSecurableFullname;
+ return this;
+ }
+
+ public String getOnSecurableFullname() {
+ return onSecurableFullname;
+ }
+
+ public PolicyInfo setOnSecurableType(SecurableType onSecurableType) {
+ this.onSecurableType = onSecurableType;
+ return this;
+ }
+
+ public SecurableType getOnSecurableType() {
+ return onSecurableType;
+ }
+
+ public PolicyInfo setPolicyType(PolicyType policyType) {
+ this.policyType = policyType;
+ return this;
+ }
+
+ public PolicyType getPolicyType() {
+ return policyType;
+ }
+
+ public PolicyInfo setRowFilter(RowFilterOptions rowFilter) {
+ this.rowFilter = rowFilter;
+ return this;
+ }
+
+ public RowFilterOptions getRowFilter() {
+ return rowFilter;
+ }
+
+ public PolicyInfo setToPrincipals(Collection<String> toPrincipals) {
+ this.toPrincipals = toPrincipals;
+ return this;
+ }
+
+ public Collection<String> getToPrincipals() {
+ return toPrincipals;
+ }
+
+ public PolicyInfo setUpdatedAt(Long updatedAt) {
+ this.updatedAt = updatedAt;
+ return this;
+ }
+
+ public Long getUpdatedAt() {
+ return updatedAt;
+ }
+
+ public PolicyInfo setUpdatedBy(String updatedBy) {
+ this.updatedBy = updatedBy;
+ return this;
+ }
+
+ public String getUpdatedBy() {
+ return updatedBy;
+ }
+
+ public PolicyInfo setWhenCondition(String whenCondition) {
+ this.whenCondition = whenCondition;
+ return this;
+ }
+
+ public String getWhenCondition() {
+ return whenCondition;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PolicyInfo that = (PolicyInfo) o;
+ return Objects.equals(columnMask, that.columnMask)
+ && Objects.equals(comment, that.comment)
+ && Objects.equals(createdAt, that.createdAt)
+ && Objects.equals(createdBy, that.createdBy)
+ && Objects.equals(exceptPrincipals, that.exceptPrincipals)
+ && Objects.equals(forSecurableType, that.forSecurableType)
+ && Objects.equals(id, that.id)
+ && Objects.equals(matchColumns, that.matchColumns)
+ && Objects.equals(name, that.name)
+ && Objects.equals(onSecurableFullname, that.onSecurableFullname)
+ && Objects.equals(onSecurableType, that.onSecurableType)
+ && Objects.equals(policyType, that.policyType)
+ && Objects.equals(rowFilter, that.rowFilter)
+ && Objects.equals(toPrincipals, that.toPrincipals)
+ && Objects.equals(updatedAt, that.updatedAt)
+ && Objects.equals(updatedBy, that.updatedBy)
+ && Objects.equals(whenCondition, that.whenCondition);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ columnMask,
+ comment,
+ createdAt,
+ createdBy,
+ exceptPrincipals,
+ forSecurableType,
+ id,
+ matchColumns,
+ name,
+ onSecurableFullname,
+ onSecurableType,
+ policyType,
+ rowFilter,
+ toPrincipals,
+ updatedAt,
+ updatedBy,
+ whenCondition);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PolicyInfo.class)
+ .add("columnMask", columnMask)
+ .add("comment", comment)
+ .add("createdAt", createdAt)
+ .add("createdBy", createdBy)
+ .add("exceptPrincipals", exceptPrincipals)
+ .add("forSecurableType", forSecurableType)
+ .add("id", id)
+ .add("matchColumns", matchColumns)
+ .add("name", name)
+ .add("onSecurableFullname", onSecurableFullname)
+ .add("onSecurableType", onSecurableType)
+ .add("policyType", policyType)
+ .add("rowFilter", rowFilter)
+ .add("toPrincipals", toPrincipals)
+ .add("updatedAt", updatedAt)
+ .add("updatedBy", updatedBy)
+ .add("whenCondition", whenCondition)
+ .toString();
+ }
+}
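
Putting the pieces together, a hedged sketch of creating a row-filter policy with the `w` client from the earlier listing sketch. `CreatePolicyRequest` is added elsewhere in this diff, and its `setPolicyInfo` setter is inferred from `PoliciesImpl` serializing `request.getPolicyInfo()`; `SecurableType.TABLE` assumes the existing catalog enum exposes a TABLE constant:

    import java.util.Arrays;

    PolicyInfo policy =
        new PolicyInfo()
            .setName("filter_by_region")
            .setOnSecurableType(SecurableType.TABLE)
            .setOnSecurableFullname("main.sales.orders")
            .setForSecurableType(SecurableType.TABLE)
            .setPolicyType(PolicyType.POLICY_TYPE_ROW_FILTER)
            .setToPrincipals(Arrays.asList("analysts"))
            .setRowFilter(
                new RowFilterOptions().setFunctionName("main.governance.region_filter"));

    PolicyInfo created =
        w.policies().createPolicy(new CreatePolicyRequest().setPolicyInfo(policy));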
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PolicyType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PolicyType.java
new file mode 100755
index 000000000..87e05d3fc
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PolicyType.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum PolicyType {
+ POLICY_TYPE_COLUMN_MASK,
+ POLICY_TYPE_ROW_FILTER,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RowFilterOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RowFilterOptions.java
new file mode 100755
index 000000000..ae8fd0602
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RowFilterOptions.java
@@ -0,0 +1,67 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class RowFilterOptions {
+ /**
+ * The fully qualified name of the row filter function. The function is called on each row of the
+ * target table. It should return a boolean value indicating whether the row should be visible to
+ * the user. Required on create and update.
+ */
+ @JsonProperty("function_name")
+ private String functionName;
+
+ /**
+ * Optional list of column aliases or constant literals to be passed as arguments to the row
+ * filter function. The type of each column should match the positional argument of the row filter
+ * function.
+ */
+ @JsonProperty("using")
+ private Collection<FunctionArgument> using;
+
+ public RowFilterOptions setFunctionName(String functionName) {
+ this.functionName = functionName;
+ return this;
+ }
+
+ public String getFunctionName() {
+ return functionName;
+ }
+
+ public RowFilterOptions setUsing(Collection<FunctionArgument> using) {
+ this.using = using;
+ return this;
+ }
+
+ public Collection<FunctionArgument> getUsing() {
+ return using;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RowFilterOptions that = (RowFilterOptions) o;
+ return Objects.equals(functionName, that.functionName) && Objects.equals(using, that.using);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(functionName, using);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RowFilterOptions.class)
+ .add("functionName", functionName)
+ .add("using", using)
+ .toString();
+ }
+}
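
When the filter function takes arguments, `using` supplies them; a sketch under the assumption that `FunctionArgument` exposes an `alias` setter, which this excerpt does not show:

    import java.util.Arrays;

    // Hypothetical: pass the "pii_col" alias matched earlier as the first
    // positional argument of the filter function.
    RowFilterOptions options =
        new RowFilterOptions()
            .setFunctionName("main.governance.region_filter")
            .setUsing(Arrays.asList(new FunctionArgument().setAlias("pii_col")));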
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
index 62e8303b5..f8657471a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
@@ -30,7 +30,7 @@ public SchemasAPI(SchemasService mock) {
}
/**
- * Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin, or
+ * Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or
* have the **CREATE_SCHEMA** privilege in the parent catalog.
*/
public SchemaInfo create(CreateSchema request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
index 816ed12fa..abe123cb5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
@@ -16,7 +16,7 @@
@Generated
public interface SchemasService {
/**
- * Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin, or
+ * Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or
* have the **CREATE_SCHEMA** privilege in the parent catalog.
*/
SchemaInfo create(CreateSchema createSchema);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java
index 03d76e0f4..3451b1ea1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java
@@ -16,7 +16,7 @@ public class SystemSchemaInfo {
/**
* The current state of enablement for the system schema. An empty string means the system schema
* is available and ready for opt-in. Possible values: AVAILABLE | ENABLE_INITIALIZED |
- * ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE
+ * ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE | MANAGED
*/
@JsonProperty("state")
private String state;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
index 882f3d16a..71c7e2a1b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
@@ -33,6 +33,33 @@ public TablesAPI(TablesService mock) {
impl = mock;
}
+ /**
+ * Creates a new table in the specified catalog and schema.
+ *
+ * <p>To create an external delta table, the caller must have the **EXTERNAL_USE_SCHEMA**
+ * privilege on the parent schema and the **EXTERNAL_USE_LOCATION** privilege on the external
+ * location. These privileges must always be granted explicitly, and cannot be inherited through
+ * ownership or **ALL_PRIVILEGES**.
+ *
+ * <p>Standard UC permissions needed to create tables still apply: **USE_CATALOG** on the parent
+ * catalog (or ownership of the parent catalog), **CREATE_TABLE** and **USE_SCHEMA** on the parent
+ * schema (or ownership of the parent schema), and **CREATE_EXTERNAL_TABLE** on external location.
+ *
+ * <p>The **columns** field needs to be in a Spark compatible format, so we recommend you use
+ * Spark to create these tables. The API itself does not validate the correctness of the column
+ * spec. If the spec is not Spark compatible, the tables may not be readable by Databricks
+ * Runtime.
+ *
+ * <p>NOTE: The Create Table API for external clients only supports creating **external delta
+ * tables**. The values shown in the respective enums are all values supported by Databricks,
+ * however for this specific Create Table API, only **table_type** **EXTERNAL** and
+ * **data_source_format** **DELTA** are supported. Additionally, column masks are not supported
+ * when creating tables through this API.
+ */
+ public TableInfo create(CreateTableRequest request) {
+ return impl.create(request);
+ }
+
public void delete(String fullName) {
delete(new DeleteTableRequest().setFullName(fullName));
}
@@ -54,11 +81,11 @@ public TableExistsResponse exists(String fullName) {
/**
* Gets if a table exists in the metastore for a specific catalog and schema. The caller must
* satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent
- * catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent
- * catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
- * privilege on the parent schema, and either be the table owner or have the SELECT privilege on
- * the table. * Have BROWSE privilege on the parent catalog * Have BROWSE privilege on the parent
- * schema.
+ * catalog * Be the owner of the parent schema and have the **USE_CATALOG** privilege on the
+ * parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the
+ * **USE_SCHEMA** privilege on the parent schema, and either be the table owner or have the
+ * **SELECT** privilege on the table. * Have **BROWSE** privilege on the parent catalog * Have
+ * **BROWSE** privilege on the parent schema
*/
public TableExistsResponse exists(ExistsRequest request) {
return impl.exists(request);
@@ -71,9 +98,9 @@ public TableInfo get(String fullName) {
/**
* Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one
* of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be
- * the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have
- * the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the
- * parent schema, and either be the table owner or have the SELECT privilege on the table.
+ * the owner of the parent schema and have the **USE_CATALOG** privilege on the parent catalog *
+ * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on
+ * the parent schema, and either be the table owner or have the **SELECT** privilege on the table.
*/
public TableInfo get(GetTableRequest request) {
return impl.get(request);
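
A hedged sketch of the new create call; the `CreateTableRequest` setters shown are assumptions modeled on the constraints documented above (only **EXTERNAL** + **DELTA** are accepted) and on the usual shape of table payloads in this SDK:

    TableInfo table =
        w.tables().create(
            new CreateTableRequest()
                .setName("events")
                .setCatalogName("main")
                .setSchemaName("raw")
                // Only EXTERNAL tables in DELTA format pass validation here.
                .setTableType(TableType.EXTERNAL)
                .setDataSourceFormat(DataSourceFormat.DELTA)
                .setStorageLocation("s3://my-bucket/raw/events"));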
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java
index 0c9058bbd..959b61096 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java
@@ -16,6 +16,20 @@ public TablesImpl(ApiClient apiClient) {
this.apiClient = apiClient;
}
+ @Override
+ public TableInfo create(CreateTableRequest request) {
+ String path = "/api/2.1/unity-catalog/tables";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, TableInfo.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public void delete(DeleteTableRequest request) {
String path = String.format("/api/2.1/unity-catalog/tables/%s", request.getFullName());
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
index 1b993a709..6c08d2bc0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
@@ -19,6 +19,31 @@
*/
@Generated
public interface TablesService {
+ /**
+ * Creates a new table in the specified catalog and schema.
+ *
+ * <p>To create an external delta table, the caller must have the **EXTERNAL_USE_SCHEMA**
+ * privilege on the parent schema and the **EXTERNAL_USE_LOCATION** privilege on the external
+ * location. These privileges must always be granted explicitly, and cannot be inherited through
+ * ownership or **ALL_PRIVILEGES**.
+ *
+ * <p>Standard UC permissions needed to create tables still apply: **USE_CATALOG** on the parent
+ * catalog (or ownership of the parent catalog), **CREATE_TABLE** and **USE_SCHEMA** on the parent
+ * schema (or ownership of the parent schema), and **CREATE_EXTERNAL_TABLE** on external location.
+ *
+ * <p>The **columns** field needs to be in a Spark compatible format, so we recommend you use
+ * Spark to create these tables. The API itself does not validate the correctness of the column
+ * spec. If the spec is not Spark compatible, the tables may not be readable by Databricks
+ * Runtime.
+ *
+ * <p>NOTE: The Create Table API for external clients only supports creating **external delta
+ * tables**. The values shown in the respective enums are all values supported by Databricks,
+ * however for this specific Create Table API, only **table_type** **EXTERNAL** and
+ * **data_source_format** **DELTA** are supported. Additionally, column masks are not supported
+ * when creating tables through this API.
+ */
+ TableInfo create(CreateTableRequest createTableRequest);
+
/**
* Deletes a table from the specified parent catalog and schema. The caller must be the owner of
* the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner
@@ -30,20 +55,20 @@ public interface TablesService {
/**
* Gets if a table exists in the metastore for a specific catalog and schema. The caller must
* satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent
- * catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent
- * catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
- * privilege on the parent schema, and either be the table owner or have the SELECT privilege on
- * the table. * Have BROWSE privilege on the parent catalog * Have BROWSE privilege on the parent
- * schema.
+ * catalog * Be the owner of the parent schema and have the **USE_CATALOG** privilege on the
+ * parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the
+ * **USE_SCHEMA** privilege on the parent schema, and either be the table owner or have the
+ * **SELECT** privilege on the table. * Have **BROWSE** privilege on the parent catalog * Have
+ * **BROWSE** privilege on the parent schema
*/
TableExistsResponse exists(ExistsRequest existsRequest);
/**
* Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one
* of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be
- * the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have
- * the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the
- * parent schema, and either be the table owner or have the SELECT privilege on the table.
+ * the owner of the parent schema and have the **USE_CATALOG** privilege on the parent catalog *
+ * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on
+ * the parent schema, and either be the table owner or have the **SELECT** privilege on the table.
*/
TableInfo get(GetTableRequest getTableRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java
new file mode 100755
index 000000000..a8d040a53
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java
@@ -0,0 +1,67 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Temporary Path Credentials refer to short-lived, downscoped credentials used to access external
+ * cloud storage locations registered in Databricks. These credentials are employed to provide
+ * secure and time-limited access to data in cloud environments such as AWS, Azure, and Google
+ * Cloud. Each cloud provider has its own type of credentials: AWS uses temporary session tokens via
+ * AWS Security Token Service (STS), Azure utilizes Shared Access Signatures (SAS) for its data
+ * storage services, and Google Cloud supports temporary credentials through OAuth 2.0.
+ *
+ * <p>Temporary path credentials ensure that data access is limited in scope and duration, reducing
+ * the risk of unauthorized access or misuse. To use the temporary path credentials API, a metastore
+ * admin needs to enable the external_access_enabled flag (off by default) at the metastore level. A
+ * user needs to be granted the EXTERNAL USE LOCATION permission by external location owner. For
+ * requests on existing external tables, user also needs to be granted the EXTERNAL USE SCHEMA
+ * permission at the schema level by catalog admin.
+ *
+ * <p>Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog
+ * admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
+ * security reasons. Similarly, EXTERNAL USE LOCATION is an external location level permission that
+ * can only be granted by external location owner explicitly and is not included in external
+ * location ownership or ALL PRIVILEGES on the external location for security reasons.
+ *
+ * <p>This API only supports temporary path credentials for external locations and external tables,
+ * and volumes will be supported in the future.
+ */
+@Generated
+public class TemporaryPathCredentialsAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(TemporaryPathCredentialsAPI.class);
+
+ private final TemporaryPathCredentialsService impl;
+
+ /** Regular-use constructor */
+ public TemporaryPathCredentialsAPI(ApiClient apiClient) {
+ impl = new TemporaryPathCredentialsImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public TemporaryPathCredentialsAPI(TemporaryPathCredentialsService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Get a short-lived credential for directly accessing cloud storage locations registered in
+ * Databricks. The Generate Temporary Path Credentials API is only supported for external storage
+ * paths, specifically external locations and external tables. Managed tables are not supported by
+ * this API. The metastore must have **external_access_enabled** flag set to true (default false).
+ * The caller must have the **EXTERNAL_USE_LOCATION** privilege on the external location; this
+ * privilege can only be granted by external location owners. For requests on existing external
+ * tables, the caller must also have the **EXTERNAL_USE_SCHEMA** privilege on the parent schema;
+ * this privilege can only be granted by catalog owners.
+ */
+ public GenerateTemporaryPathCredentialResponse generateTemporaryPathCredentials(
+ GenerateTemporaryPathCredentialRequest request) {
+ return impl.generateTemporaryPathCredentials(request);
+ }
+
+ public TemporaryPathCredentialsService impl() {
+ return impl;
+ }
+}
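
A sketch of requesting a short-lived read credential for an external path; the `setUrl`/`setOperation` setters and the `temporaryPathCredentials()` accessor are assumptions, while `PathOperation` is the enum added above:

    GenerateTemporaryPathCredentialResponse pathCreds =
        w.temporaryPathCredentials().generateTemporaryPathCredentials(
            new GenerateTemporaryPathCredentialRequest()
                .setUrl("s3://my-bucket/raw/events")
                .setOperation(PathOperation.PATH_READ));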
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsImpl.java
new file mode 100755
index 000000000..cf98efdfb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsImpl.java
@@ -0,0 +1,33 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of TemporaryPathCredentials */
+@Generated
+class TemporaryPathCredentialsImpl implements TemporaryPathCredentialsService {
+ private final ApiClient apiClient;
+
+ public TemporaryPathCredentialsImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public GenerateTemporaryPathCredentialResponse generateTemporaryPathCredentials(
+ GenerateTemporaryPathCredentialRequest request) {
+ String path = "/api/2.0/unity-catalog/temporary-path-credentials";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, GenerateTemporaryPathCredentialResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java
new file mode 100755
index 000000000..9a43feb56
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Temporary Path Credentials refer to short-lived, downscoped credentials used to access external
+ * cloud storage locations registered in Databricks. These credentials are employed to provide
+ * secure and time-limited access to data in cloud environments such as AWS, Azure, and Google
+ * Cloud. Each cloud provider has its own type of credentials: AWS uses temporary session tokens via
+ * AWS Security Token Service (STS), Azure utilizes Shared Access Signatures (SAS) for its data
+ * storage services, and Google Cloud supports temporary credentials through OAuth 2.0.
+ *
+ * <p>Temporary path credentials ensure that data access is limited in scope and duration, reducing
+ * the risk of unauthorized access or misuse. To use the temporary path credentials API, a metastore
+ * admin needs to enable the external_access_enabled flag (off by default) at the metastore level. A
+ * user needs to be granted the EXTERNAL USE LOCATION permission by external location owner. For
+ * requests on existing external tables, user also needs to be granted the EXTERNAL USE SCHEMA
+ * permission at the schema level by catalog admin.
+ *
+ * <p>Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog
+ * admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
+ * security reasons. Similarly, EXTERNAL USE LOCATION is an external location level permission that
+ * can only be granted by external location owner explicitly and is not included in external
+ * location ownership or ALL PRIVILEGES on the external location for security reasons.
+ *
+ * <p>This API only supports temporary path credentials for external locations and external tables,
+ * and volumes will be supported in the future.
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface TemporaryPathCredentialsService {
+ /**
+ * Get a short-lived credential for directly accessing cloud storage locations registered in
+ * Databricks. The Generate Temporary Path Credentials API is only supported for external storage
+ * paths, specifically external locations and external tables. Managed tables are not supported by
+ * this API. The metastore must have **external_access_enabled** flag set to true (default false).
+ * The caller must have the **EXTERNAL_USE_LOCATION** privilege on the external location; this
+ * privilege can only be granted by external location owners. For requests on existing external
+ * tables, the caller must also have the **EXTERNAL_USE_SCHEMA** privilege on the parent schema;
+ * this privilege can only be granted by catalog owners.
+ */
+ GenerateTemporaryPathCredentialResponse generateTemporaryPathCredentials(
+ GenerateTemporaryPathCredentialRequest generateTemporaryPathCredentialRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
index 900b1179b..2d7da4059 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
@@ -8,18 +8,19 @@
/**
* Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud
- * storage locationswhere table data is stored in Databricks. These credentials are employed to
- * provide secure and time-limitedaccess to data in cloud environments such as AWS, Azure, and
- * Google Cloud. Each cloud provider has its own typeof credentials: AWS uses temporary session
- * tokens via AWS Security Token Service (STS), Azure utilizesShared Access Signatures (SAS) for its
- * data storage services, and Google Cloud supports temporary credentialsthrough OAuth 2.0.Temporary
- * table credentials ensure that data access is limited in scope and duration, reducing the risk
- * ofunauthorized access or misuse. To use the temporary table credentials API, a metastore admin
- * needs to enable the external_access_enabled flag (off by default) at the metastore level, and
- * user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level by catalog admin.
- * Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog
- * admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
- * security reason.
+ * storage locations where table data is stored in Databricks. These credentials are employed to
+ * provide secure and time-limited access to data in cloud environments such as AWS, Azure, and
+ * Google Cloud. Each cloud provider has its own type of credentials: AWS uses temporary session
+ * tokens via AWS Security Token Service (STS), Azure utilizes Shared Access Signatures (SAS) for
+ * its data storage services, and Google Cloud supports temporary credentials through OAuth 2.0.
+ *
+ * <p>Temporary table credentials ensure that data access is limited in scope and duration, reducing
+ * the risk of unauthorized access or misuse. To use the temporary table credentials API, a
+ * metastore admin needs to enable the external_access_enabled flag (off by default) at the
+ * metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema
+ * level by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only
+ * be granted by catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES
+ * on the schema for security reasons.
*/
@Generated
public class TemporaryTableCredentialsAPI {
@@ -39,9 +40,9 @@ public TemporaryTableCredentialsAPI(TemporaryTableCredentialsService mock) {
/**
* Get a short-lived credential for directly accessing the table data on cloud storage. The
- * metastore must have external_access_enabled flag set to true (default false). The caller must
- * have EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted
- * by catalog owners.
+ * metastore must have **external_access_enabled** flag set to true (default false). The caller
+ * must have the **EXTERNAL_USE_SCHEMA** privilege on the parent schema and this privilege can
+ * only be granted by catalog owners.
*/
public GenerateTemporaryTableCredentialResponse generateTemporaryTableCredentials(
GenerateTemporaryTableCredentialRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java
index d8cc3e5bf..10a02b1cd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java
@@ -5,18 +5,19 @@
/**
* Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud
- * storage locationswhere table data is stored in Databricks. These credentials are employed to
- * provide secure and time-limitedaccess to data in cloud environments such as AWS, Azure, and
- * Google Cloud. Each cloud provider has its own typeof credentials: AWS uses temporary session
- * tokens via AWS Security Token Service (STS), Azure utilizesShared Access Signatures (SAS) for its
- * data storage services, and Google Cloud supports temporary credentialsthrough OAuth 2.0.Temporary
- * table credentials ensure that data access is limited in scope and duration, reducing the risk
- * ofunauthorized access or misuse. To use the temporary table credentials API, a metastore admin
- * needs to enable the external_access_enabled flag (off by default) at the metastore level, and
- * user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level by catalog admin.
- * Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog
- * admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
- * security reason.
+ * storage locations where table data is stored in Databricks. These credentials are employed to
+ * provide secure and time-limited access to data in cloud environments such as AWS, Azure, and
+ * Google Cloud. Each cloud provider has its own type of credentials: AWS uses temporary session
+ * tokens via AWS Security Token Service (STS), Azure utilizes Shared Access Signatures (SAS) for
+ * its data storage services, and Google Cloud supports temporary credentials through OAuth 2.0.
+ *
+ * <p>Temporary table credentials ensure that data access is limited in scope and duration, reducing
+ * the risk of unauthorized access or misuse. To use the temporary table credentials API, a
+ * metastore admin needs to enable the external_access_enabled flag (off by default) at the
+ * metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema
+ * level by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only
+ * be granted by catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES
+ * on the schema for security reasons.
*
 * <p>This is the high-level interface, that contains generated methods.
*
@@ -26,9 +27,9 @@
public interface TemporaryTableCredentialsService {
/**
* Get a short-lived credential for directly accessing the table data on cloud storage. The
- * metastore must have external_access_enabled flag set to true (default false). The caller must
- * have EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted
- * by catalog owners.
+ * metastore must have **external_access_enabled** flag set to true (default false). The caller
+ * must have the **EXTERNAL_USE_SCHEMA** privilege on the parent schema and this privilege can
+ * only be granted by catalog owners.
*/
GenerateTemporaryTableCredentialResponse generateTemporaryTableCredentials(
GenerateTemporaryTableCredentialRequest generateTemporaryTableCredentialRequest);
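
For comparison with the path variant above, a sketch of the table credential call; the request setters, the `TableOperation` enum value, and the accessor name are assumptions not shown in this diff:

    GenerateTemporaryTableCredentialResponse tableCreds =
        w.temporaryTableCredentials().generateTemporaryTableCredentials(
            new GenerateTemporaryTableCredentialRequest()
                // Placeholder table UUID; use the id of an existing external table.
                .setTableId("00000000-0000-0000-0000-000000000000")
                .setOperation(TableOperation.READ));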
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePolicyRequest.java
new file mode 100755
index 000000000..bd6369215
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePolicyRequest.java
@@ -0,0 +1,114 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdatePolicyRequest {
+ /** Required. The name of the policy to update. */
+ @JsonIgnore private String name;
+
+ /** Required. The fully qualified name of the securable to update the policy for. */
+ @JsonIgnore private String onSecurableFullname;
+
+ /** Required. The type of the securable to update the policy for. */
+ @JsonIgnore private String onSecurableType;
+
+ /**
+ * Optional fields to update. This is the request body for updating a policy. Use `update_mask`
+ * field to specify which fields in the request are to be updated. - If `update_mask` is empty or
+ * "*", all specified fields will be updated. - If `update_mask` is specified, only the fields
+ * specified in the `update_mask` will be updated. If a field is specified in `update_mask` and
+ * not set in the request, the field will be cleared. Users can use the update mask to explicitly
+ * unset optional fields such as `except_principals` and `when_condition`.
+ */
+ @JsonProperty("policy_info")
+ private PolicyInfo policyInfo;
+
+ /**
+ * Optional. The update mask field for specifying user intentions on which fields to update in the
+ * request.
+ */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdatePolicyRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdatePolicyRequest setOnSecurableFullname(String onSecurableFullname) {
+ this.onSecurableFullname = onSecurableFullname;
+ return this;
+ }
+
+ public String getOnSecurableFullname() {
+ return onSecurableFullname;
+ }
+
+ public UpdatePolicyRequest setOnSecurableType(String onSecurableType) {
+ this.onSecurableType = onSecurableType;
+ return this;
+ }
+
+ public String getOnSecurableType() {
+ return onSecurableType;
+ }
+
+ public UpdatePolicyRequest setPolicyInfo(PolicyInfo policyInfo) {
+ this.policyInfo = policyInfo;
+ return this;
+ }
+
+ public PolicyInfo getPolicyInfo() {
+ return policyInfo;
+ }
+
+ public UpdatePolicyRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdatePolicyRequest that = (UpdatePolicyRequest) o;
+ return Objects.equals(name, that.name)
+ && Objects.equals(onSecurableFullname, that.onSecurableFullname)
+ && Objects.equals(onSecurableType, that.onSecurableType)
+ && Objects.equals(policyInfo, that.policyInfo)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, onSecurableFullname, onSecurableType, policyInfo, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdatePolicyRequest.class)
+ .add("name", name)
+ .add("onSecurableFullname", onSecurableFullname)
+ .add("onSecurableType", onSecurableType)
+ .add("policyInfo", policyInfo)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
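
A sketch of a partial update that relies on the mask semantics documented above: fields named in `update_mask` but left unset in `policy_info` are cleared.

    PolicyInfo updated =
        w.policies().updatePolicy(
            new UpdatePolicyRequest()
                .setOnSecurableType("table")
                .setOnSecurableFullname("main.sales.orders")
                .setName("filter_by_region")
                // comment is replaced; when_condition is named in the mask
                // but unset in policy_info, so it is explicitly cleared.
                .setUpdateMask("comment,when_condition")
                .setPolicyInfo(new PolicyInfo().setComment("Tightened scope")));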
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java
index ce39e20d8..92ccceeeb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java
@@ -46,7 +46,8 @@ public class CleanRoomAsset {
 * <p>For UC securable assets (tables, volumes, etc.), the format is
* *shared_catalog*.*shared_schema*.*asset_name*
*
-   * <p>For notebooks, the name is the notebook file name.
+   * <p>For notebooks, the name is the notebook file name. For jar analyses, the name is the jar
+ * analysis name.
*/
@JsonProperty("name")
private String name;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java
index 299fdd186..d56dd9b48 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java
@@ -21,7 +21,7 @@ public class CleanRoomAssetNotebook {
@JsonProperty("notebook_content")
private String notebookContent;
- /** top-level status derived from all reviews */
+ /** Top-level status derived from all reviews */
@JsonProperty("review_state")
private CleanRoomNotebookReviewNotebookReviewState reviewState;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
index 3a5c80156..2d8a064ba 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
@@ -38,7 +38,7 @@ public CleanRoomAsset create(CreateCleanRoomAssetRequest request) {
return impl.create(request);
}
- /** submit an asset review */
+ /** Submit an asset review */
public CreateCleanRoomAssetReviewResponse createCleanRoomAssetReview(
CreateCleanRoomAssetReviewRequest request) {
return impl.createCleanRoomAssetReview(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsService.java
index cb7b9a09c..e1513e38d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsService.java
@@ -22,7 +22,7 @@ public interface CleanRoomAssetsService {
*/
CleanRoomAsset create(CreateCleanRoomAssetRequest createCleanRoomAssetRequest);
- /** submit an asset review */
+ /** Submit an asset review */
CreateCleanRoomAssetReviewResponse createCleanRoomAssetReview(
CreateCleanRoomAssetReviewRequest createCleanRoomAssetReviewRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAutoApprovalRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAutoApprovalRule.java
index 694db4de6..f9697996f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAutoApprovalRule.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAutoApprovalRule.java
@@ -9,11 +9,17 @@
@Generated
public class CleanRoomAutoApprovalRule {
- /** */
+ /**
+ * Collaborator alias of the author covered by the rule. Only one of `author_collaborator_alias`
+ * and `author_scope` can be set.
+ */
@JsonProperty("author_collaborator_alias")
private String authorCollaboratorAlias;
- /** */
+ /**
+ * Scope of authors covered by the rule. Only one of `author_collaborator_alias` and
+ * `author_scope` can be set.
+ */
@JsonProperty("author_scope")
private CleanRoomAutoApprovalRuleAuthorScope authorScope;
@@ -33,7 +39,7 @@ public class CleanRoomAutoApprovalRule {
@JsonProperty("rule_owner_collaborator_alias")
private String ruleOwnerCollaboratorAlias;
- /** */
+ /** Collaborator alias of the runner covered by the rule. */
@JsonProperty("runner_collaborator_alias")
private String runnerCollaboratorAlias;
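
A short sketch of the mutual-exclusivity constraint documented above: a rule sets either `author_collaborator_alias` or `author_scope`, never both. The setter names follow the SDK's generated builder pattern, and the `ANY_AUTHOR` constant and aliases are assumptions.

```java
import com.databricks.sdk.service.cleanrooms.CleanRoomAutoApprovalRule;
import com.databricks.sdk.service.cleanrooms.CleanRoomAutoApprovalRuleAuthorScope;

public class AutoApprovalRuleExample {
  public static void main(String[] args) {
    // Keyed on author scope; author_collaborator_alias stays unset.
    CleanRoomAutoApprovalRule byScope =
        new CleanRoomAutoApprovalRule()
            .setAuthorScope(CleanRoomAutoApprovalRuleAuthorScope.ANY_AUTHOR) // assumed constant
            .setRunnerCollaboratorAlias("analytics_partner"); // hypothetical alias

    // Keyed on a single author instead; author_scope stays unset.
    CleanRoomAutoApprovalRule byAuthor =
        new CleanRoomAutoApprovalRule().setAuthorCollaboratorAlias("data_provider");

    System.out.println(byScope);
    System.out.println(byAuthor);
  }
}
```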
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java
index 035bd6b57..0b33a1bab 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java
@@ -9,23 +9,23 @@
@Generated
public class CleanRoomNotebookReview {
- /** review comment */
+ /** Review comment */
@JsonProperty("comment")
private String comment;
- /** timestamp of when the review was submitted */
+ /** When the review was submitted, in epoch milliseconds */
@JsonProperty("created_at_millis")
private Long createdAtMillis;
- /** review outcome */
+ /** Review outcome */
@JsonProperty("review_state")
private CleanRoomNotebookReviewNotebookReviewState reviewState;
- /** specified when the review was not explicitly made by a user */
+ /** Specified when the review was not explicitly made by a user */
@JsonProperty("review_sub_reason")
private CleanRoomNotebookReviewNotebookReviewSubReason reviewSubReason;
- /** collaborator alias of the reviewer */
+ /** Collaborator alias of the reviewer */
@JsonProperty("reviewer_collaborator_alias")
private String reviewerCollaboratorAlias;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetReviewRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetReviewRequest.java
index ec3155524..4ffa73f39 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetReviewRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetReviewRequest.java
@@ -10,7 +10,7 @@
@Generated
public class CreateCleanRoomAssetReviewRequest {
- /** can only be NOTEBOOK_FILE for now */
+ /** Asset type. Can only be NOTEBOOK_FILE. */
@JsonIgnore private CleanRoomAssetAssetType assetType;
/** Name of the clean room */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetReviewResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetReviewResponse.java
index 0a9dbad0a..8ab62a45c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetReviewResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetReviewResponse.java
@@ -10,7 +10,7 @@
@Generated
public class CreateCleanRoomAssetReviewResponse {
- /** top-level status derived from all reviews */
+ /** Top-level status derived from all reviews */
@JsonProperty("notebook_review_state")
private CleanRoomNotebookReviewNotebookReviewState notebookReviewState;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/NotebookVersionReview.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/NotebookVersionReview.java
index 96e56044c..7bfbb3d3f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/NotebookVersionReview.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/NotebookVersionReview.java
@@ -9,15 +9,15 @@
@Generated
public class NotebookVersionReview {
- /** review comment */
+ /** Review comment */
@JsonProperty("comment")
private String comment;
- /** etag that identifies the notebook version */
+ /** Etag identifying the notebook version */
@JsonProperty("etag")
private String etag;
- /** review outcome */
+ /** Review outcome */
@JsonProperty("review_state")
private CleanRoomNotebookReviewNotebookReviewState reviewState;
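
A sketch of assembling a review pinned to a notebook version via its etag; the setter names follow the generated pattern and the `APPROVED` constant is an assumption.

```java
import com.databricks.sdk.service.cleanrooms.CleanRoomNotebookReviewNotebookReviewState;
import com.databricks.sdk.service.cleanrooms.NotebookVersionReview;

public class NotebookReviewExample {
  public static void main(String[] args) {
    NotebookVersionReview review =
        new NotebookVersionReview()
            .setEtag("a1b2c3") // hypothetical etag identifying the reviewed version
            .setComment("Aggregation thresholds verified; safe to run.")
            .setReviewState(CleanRoomNotebookReviewNotebookReviewState.APPROVED); // assumed constant
    System.out.println(review);
  }
}
```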
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java
index f5c2d54fa..57d530c52 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java
@@ -30,7 +30,8 @@ public class UpdateCleanRoomAssetRequest {
 * <p>For UC securable assets (tables, volumes, etc.), the format is
* *shared_catalog*.*shared_schema*.*asset_name*
*
-   * <p>For notebooks, the name is the notebook file name.
+   * <p>For notebooks, the name is the notebook file name. For jar analyses, the name is the jar
+ * analysis name.
*/
@JsonIgnore private String name;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java
index 61d5be7d7..125767030 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java
@@ -22,6 +22,18 @@ public class GcpAttributes {
@JsonProperty("boot_disk_size")
private Long bootDiskSize;
+ /**
+ * The first `first_on_demand` nodes of the cluster will be placed on on-demand instances. This
+ * value should be greater than 0 to ensure that the cluster driver node is placed on an
+ * on-demand instance. If this value is greater than or equal to the current cluster size, all
+ * nodes will be placed on on-demand instances. If this value is less than the current cluster size,
+ * `first_on_demand` nodes will be placed on on-demand instances and the remainder will be placed
+ * on `availability` instances. Note that this value does not affect cluster size and cannot
+ * currently be mutated over the lifetime of a cluster.
+ */
+ @JsonProperty("first_on_demand")
+ private Long firstOnDemand;
+
/**
* If provided, the cluster will impersonate the google service account when accessing gcloud
* services (like GCS). The google service account must have previously been added to the
@@ -77,6 +89,15 @@ public Long getBootDiskSize() {
return bootDiskSize;
}
+ public GcpAttributes setFirstOnDemand(Long firstOnDemand) {
+ this.firstOnDemand = firstOnDemand;
+ return this;
+ }
+
+ public Long getFirstOnDemand() {
+ return firstOnDemand;
+ }
+
public GcpAttributes setGoogleServiceAccount(String googleServiceAccount) {
this.googleServiceAccount = googleServiceAccount;
return this;
@@ -120,6 +141,7 @@ public boolean equals(Object o) {
GcpAttributes that = (GcpAttributes) o;
return Objects.equals(availability, that.availability)
&& Objects.equals(bootDiskSize, that.bootDiskSize)
+ && Objects.equals(firstOnDemand, that.firstOnDemand)
&& Objects.equals(googleServiceAccount, that.googleServiceAccount)
&& Objects.equals(localSsdCount, that.localSsdCount)
&& Objects.equals(usePreemptibleExecutors, that.usePreemptibleExecutors)
@@ -131,6 +153,7 @@ public int hashCode() {
return Objects.hash(
availability,
bootDiskSize,
+ firstOnDemand,
googleServiceAccount,
localSsdCount,
usePreemptibleExecutors,
@@ -142,6 +165,7 @@ public String toString() {
return new ToStringer(GcpAttributes.class)
.add("availability", availability)
.add("bootDiskSize", bootDiskSize)
+ .add("firstOnDemand", firstOnDemand)
.add("googleServiceAccount", googleServiceAccount)
.add("localSsdCount", localSsdCount)
.add("usePreemptibleExecutors", usePreemptibleExecutors)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
index 196f37bc6..8c96210c2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
@@ -206,12 +206,36 @@ public SyncedDatabaseTable getSyncedDatabaseTable(GetSyncedDatabaseTableRequest
return impl.getSyncedDatabaseTable(request);
}
+  public Iterable<DatabaseCatalog> listDatabaseCatalogs(String instanceName) {
+ return listDatabaseCatalogs(new ListDatabaseCatalogsRequest().setInstanceName(instanceName));
+ }
+
+ /** This API is currently unimplemented, but exposed for Terraform support. */
+  public Iterable<DatabaseCatalog> listDatabaseCatalogs(ListDatabaseCatalogsRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listDatabaseCatalogs,
+ ListDatabaseCatalogsResponse::getDatabaseCatalogs,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
  public Iterable<DatabaseInstanceRole> listDatabaseInstanceRoles(String instanceName) {
return listDatabaseInstanceRoles(
new ListDatabaseInstanceRolesRequest().setInstanceName(instanceName));
}
- /** START OF PG ROLE APIs Section */
+ /**
+   * START OF PG ROLE APIs Section. These APIs are marked as PUBLIC with stage < PUBLIC_PREVIEW.
+   * Given the more recent Lakebase V2 plans, we don't plan to ever advance these to
+   * PUBLIC_PREVIEW. These APIs will remain effectively undocumented/UI-only, and we'll aim for a
+   * new public roles API as part of V2 PuPr.
+ */
  public Iterable<DatabaseInstanceRole> listDatabaseInstanceRoles(
ListDatabaseInstanceRolesRequest request) {
return new Paginator<>(
@@ -242,11 +266,42 @@ public Iterable<DatabaseInstance> listDatabaseInstances(ListDatabaseInstancesReq
});
}
+  public Iterable<SyncedDatabaseTable> listSyncedDatabaseTables(String instanceName) {
+ return listSyncedDatabaseTables(
+ new ListSyncedDatabaseTablesRequest().setInstanceName(instanceName));
+ }
+
+ /** This API is currently unimplemented, but exposed for Terraform support. */
+  public Iterable<SyncedDatabaseTable> listSyncedDatabaseTables(
+ ListSyncedDatabaseTablesRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listSyncedDatabaseTables,
+ ListSyncedDatabaseTablesResponse::getSyncedTables,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ /** This API is currently unimplemented, but exposed for Terraform support. */
+ public DatabaseCatalog updateDatabaseCatalog(UpdateDatabaseCatalogRequest request) {
+ return impl.updateDatabaseCatalog(request);
+ }
+
/** Update a Database Instance. */
public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest request) {
return impl.updateDatabaseInstance(request);
}
+ /** This API is currently unimplemented, but exposed for Terraform support. */
+ public SyncedDatabaseTable updateSyncedDatabaseTable(UpdateSyncedDatabaseTableRequest request) {
+ return impl.updateSyncedDatabaseTable(request);
+ }
+
public DatabaseService impl() {
return impl;
}
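
A consumption sketch for the new list method: the `Paginator` follows `next_page_token` transparently, so callers simply iterate. The `WorkspaceClient.database()` accessor and `getName()` getter are assumed from the SDK's usual wiring; the instance name is hypothetical.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.database.DatabaseCatalog;

public class ListDatabaseCatalogsExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Iteration requests further pages until next_page_token comes back empty.
    for (DatabaseCatalog catalog : w.database().listDatabaseCatalogs("my-instance")) {
      System.out.println(catalog.getName()); // getter assumed on DatabaseCatalog
    }
  }
}
```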
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
index 702b11a4c..f58d3a0bc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
@@ -251,6 +251,20 @@ public SyncedDatabaseTable getSyncedDatabaseTable(GetSyncedDatabaseTableRequest
}
}
+ @Override
+ public ListDatabaseCatalogsResponse listDatabaseCatalogs(ListDatabaseCatalogsRequest request) {
+ String path =
+ String.format("/api/2.0/database/instances/%s/catalogs", request.getInstanceName());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListDatabaseCatalogsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public ListDatabaseInstanceRolesResponse listDatabaseInstanceRoles(
ListDatabaseInstanceRolesRequest request) {
@@ -278,6 +292,35 @@ public ListDatabaseInstancesResponse listDatabaseInstances(ListDatabaseInstances
}
}
+ @Override
+ public ListSyncedDatabaseTablesResponse listSyncedDatabaseTables(
+ ListSyncedDatabaseTablesRequest request) {
+ String path =
+ String.format("/api/2.0/database/instances/%s/synced_tables", request.getInstanceName());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListSyncedDatabaseTablesResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DatabaseCatalog updateDatabaseCatalog(UpdateDatabaseCatalogRequest request) {
+ String path = String.format("/api/2.0/database/catalogs/%s", request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getDatabaseCatalog()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseCatalog.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest request) {
String path = String.format("/api/2.0/database/instances/%s", request.getName());
@@ -291,4 +334,18 @@ public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest req
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
+
+ @Override
+ public SyncedDatabaseTable updateSyncedDatabaseTable(UpdateSyncedDatabaseTableRequest request) {
+ String path = String.format("/api/2.0/database/synced_tables/%s", request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getSyncedTable()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, SyncedDatabaseTable.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
index 8f67c97ff..4f6d40276 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
@@ -74,7 +74,16 @@ DatabaseInstanceRole getDatabaseInstanceRole(
SyncedDatabaseTable getSyncedDatabaseTable(
GetSyncedDatabaseTableRequest getSyncedDatabaseTableRequest);
- /** START OF PG ROLE APIs Section */
+ /** This API is currently unimplemented, but exposed for Terraform support. */
+ ListDatabaseCatalogsResponse listDatabaseCatalogs(
+ ListDatabaseCatalogsRequest listDatabaseCatalogsRequest);
+
+ /**
+   * START OF PG ROLE APIs Section. These APIs are marked as PUBLIC with stage < PUBLIC_PREVIEW.
+   * Given the more recent Lakebase V2 plans, we don't plan to ever advance these to
+   * PUBLIC_PREVIEW. These APIs will remain effectively undocumented/UI-only, and we'll aim for a
+   * new public roles API as part of V2 PuPr.
+ */
ListDatabaseInstanceRolesResponse listDatabaseInstanceRoles(
ListDatabaseInstanceRolesRequest listDatabaseInstanceRolesRequest);
@@ -82,7 +91,18 @@ ListDatabaseInstanceRolesResponse listDatabaseInstanceRoles(
ListDatabaseInstancesResponse listDatabaseInstances(
ListDatabaseInstancesRequest listDatabaseInstancesRequest);
+ /** This API is currently unimplemented, but exposed for Terraform support. */
+ ListSyncedDatabaseTablesResponse listSyncedDatabaseTables(
+ ListSyncedDatabaseTablesRequest listSyncedDatabaseTablesRequest);
+
+ /** This API is currently unimplemented, but exposed for Terraform support. */
+ DatabaseCatalog updateDatabaseCatalog(UpdateDatabaseCatalogRequest updateDatabaseCatalogRequest);
+
/** Update a Database Instance. */
DatabaseInstance updateDatabaseInstance(
UpdateDatabaseInstanceRequest updateDatabaseInstanceRequest);
+
+ /** This API is currently unimplemented, but exposed for Terraform support. */
+ SyncedDatabaseTable updateSyncedDatabaseTable(
+ UpdateSyncedDatabaseTableRequest updateSyncedDatabaseTableRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsRequest.java
new file mode 100755
index 000000000..d82b5746d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseCatalogsRequest {
+ /** Name of the instance to get database catalogs for. */
+ @JsonIgnore private String instanceName;
+
+ /** Upper bound for items returned. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+   * Pagination token to go to the next page of database catalogs. Requests first page if absent.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListDatabaseCatalogsRequest setInstanceName(String instanceName) {
+ this.instanceName = instanceName;
+ return this;
+ }
+
+ public String getInstanceName() {
+ return instanceName;
+ }
+
+ public ListDatabaseCatalogsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListDatabaseCatalogsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDatabaseCatalogsRequest that = (ListDatabaseCatalogsRequest) o;
+ return Objects.equals(instanceName, that.instanceName)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(instanceName, pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDatabaseCatalogsRequest.class)
+ .add("instanceName", instanceName)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsResponse.java
new file mode 100755
index 000000000..0eaf899c0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseCatalogsResponse {
+ /** */
+ @JsonProperty("database_catalogs")
+  private Collection<DatabaseCatalog> databaseCatalogs;
+
+ /** Pagination token to request the next page of database catalogs. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListDatabaseCatalogsResponse setDatabaseCatalogs(
+      Collection<DatabaseCatalog> databaseCatalogs) {
+ this.databaseCatalogs = databaseCatalogs;
+ return this;
+ }
+
+  public Collection<DatabaseCatalog> getDatabaseCatalogs() {
+ return databaseCatalogs;
+ }
+
+ public ListDatabaseCatalogsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDatabaseCatalogsResponse that = (ListDatabaseCatalogsResponse) o;
+ return Objects.equals(databaseCatalogs, that.databaseCatalogs)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseCatalogs, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDatabaseCatalogsResponse.class)
+ .add("databaseCatalogs", databaseCatalogs)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesRequest.java
new file mode 100755
index 000000000..219805f8d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListSyncedDatabaseTablesRequest {
+ /** Name of the instance to get synced tables for. */
+ @JsonIgnore private String instanceName;
+
+ /** Upper bound for items returned. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * Pagination token to go to the next page of synced database tables. Requests first page if
+ * absent.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListSyncedDatabaseTablesRequest setInstanceName(String instanceName) {
+ this.instanceName = instanceName;
+ return this;
+ }
+
+ public String getInstanceName() {
+ return instanceName;
+ }
+
+ public ListSyncedDatabaseTablesRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListSyncedDatabaseTablesRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListSyncedDatabaseTablesRequest that = (ListSyncedDatabaseTablesRequest) o;
+ return Objects.equals(instanceName, that.instanceName)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(instanceName, pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListSyncedDatabaseTablesRequest.class)
+ .add("instanceName", instanceName)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesResponse.java
new file mode 100755
index 000000000..fbc9093a6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListSyncedDatabaseTablesResponse {
+ /** Pagination token to request the next page of synced tables. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("synced_tables")
+  private Collection<SyncedDatabaseTable> syncedTables;
+
+ public ListSyncedDatabaseTablesResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ public ListSyncedDatabaseTablesResponse setSyncedTables(
+      Collection<SyncedDatabaseTable> syncedTables) {
+ this.syncedTables = syncedTables;
+ return this;
+ }
+
+  public Collection<SyncedDatabaseTable> getSyncedTables() {
+ return syncedTables;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListSyncedDatabaseTablesResponse that = (ListSyncedDatabaseTablesResponse) o;
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(syncedTables, that.syncedTables);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, syncedTables);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListSyncedDatabaseTablesResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("syncedTables", syncedTables)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java
new file mode 100755
index 000000000..49cca7c7a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateDatabaseCatalogRequest {
+ /** Note that updating a database catalog is not yet supported. */
+ @JsonProperty("database_catalog")
+ private DatabaseCatalog databaseCatalog;
+
+ /** The name of the catalog in UC. */
+ @JsonIgnore private String name;
+
+ /** The list of fields to update. Setting this field is not yet supported. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateDatabaseCatalogRequest setDatabaseCatalog(DatabaseCatalog databaseCatalog) {
+ this.databaseCatalog = databaseCatalog;
+ return this;
+ }
+
+ public DatabaseCatalog getDatabaseCatalog() {
+ return databaseCatalog;
+ }
+
+ public UpdateDatabaseCatalogRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateDatabaseCatalogRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDatabaseCatalogRequest that = (UpdateDatabaseCatalogRequest) o;
+ return Objects.equals(databaseCatalog, that.databaseCatalog)
+ && Objects.equals(name, that.name)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseCatalog, name, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDatabaseCatalogRequest.class)
+ .add("databaseCatalog", databaseCatalog)
+ .add("name", name)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
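
Although the server does not yet apply catalog updates, the request shape can already be constructed; a sketch with hypothetical names.

```java
import com.databricks.sdk.service.database.DatabaseCatalog;
import com.databricks.sdk.service.database.UpdateDatabaseCatalogRequest;

public class UpdateDatabaseCatalogExample {
  public static void main(String[] args) {
    UpdateDatabaseCatalogRequest request =
        new UpdateDatabaseCatalogRequest()
            .setName("lakebase_catalog") // hypothetical UC catalog name
            .setDatabaseCatalog(new DatabaseCatalog()) // updates are not yet applied server-side
            .setUpdateMask("*"); // setting this field is not yet supported either
    System.out.println(request);
  }
}
```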
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java
index d6102575d..6f2966edb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java
@@ -19,7 +19,8 @@ public class UpdateDatabaseInstanceRequest {
@JsonIgnore private String name;
/**
- * The list of fields to update. This field is not yet supported, and is ignored by the server.
+   * The list of fields to update. If unspecified, all fields will be updated when possible. To
+   * clear custom_tags, include custom_tags in the update_mask and provide an empty custom_tags map.
*/
@JsonIgnore
@QueryParam("update_mask")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java
new file mode 100755
index 000000000..679dc16c8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateSyncedDatabaseTableRequest {
+ /** Full three-part (catalog, schema, table) name of the table. */
+ @JsonIgnore private String name;
+
+ /** Note that updating a synced database table is not yet supported. */
+ @JsonProperty("synced_table")
+ private SyncedDatabaseTable syncedTable;
+
+ /** The list of fields to update. Setting this field is not yet supported. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateSyncedDatabaseTableRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateSyncedDatabaseTableRequest setSyncedTable(SyncedDatabaseTable syncedTable) {
+ this.syncedTable = syncedTable;
+ return this;
+ }
+
+ public SyncedDatabaseTable getSyncedTable() {
+ return syncedTable;
+ }
+
+ public UpdateSyncedDatabaseTableRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateSyncedDatabaseTableRequest that = (UpdateSyncedDatabaseTableRequest) o;
+ return Objects.equals(name, that.name)
+ && Objects.equals(syncedTable, that.syncedTable)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, syncedTable, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateSyncedDatabaseTableRequest.class)
+ .add("name", name)
+ .add("syncedTable", syncedTable)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
index b724bd33d..2ecd577cc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
@@ -187,6 +187,14 @@ public class CreateJob {
@JsonProperty("trigger")
private TriggerSettings trigger;
+ /**
+   * The ID of the user-specified usage policy to use for this job. If not specified, a default
+ * usage policy may be applied when creating or modifying the job. See
+ * `effective_budget_policy_id` for the budget policy used by this workload.
+ */
+ @JsonProperty("usage_policy_id")
+ private String usagePolicyId;
+
/** A collection of system notification IDs to notify when runs of this job begin or complete. */
@JsonProperty("webhook_notifications")
private WebhookNotifications webhookNotifications;
@@ -407,6 +415,15 @@ public TriggerSettings getTrigger() {
return trigger;
}
+ public CreateJob setUsagePolicyId(String usagePolicyId) {
+ this.usagePolicyId = usagePolicyId;
+ return this;
+ }
+
+ public String getUsagePolicyId() {
+ return usagePolicyId;
+ }
+
public CreateJob setWebhookNotifications(WebhookNotifications webhookNotifications) {
this.webhookNotifications = webhookNotifications;
return this;
@@ -445,6 +462,7 @@ public boolean equals(Object o) {
&& Objects.equals(tasks, that.tasks)
&& Objects.equals(timeoutSeconds, that.timeoutSeconds)
&& Objects.equals(trigger, that.trigger)
+ && Objects.equals(usagePolicyId, that.usagePolicyId)
&& Objects.equals(webhookNotifications, that.webhookNotifications);
}
@@ -475,6 +493,7 @@ public int hashCode() {
tasks,
timeoutSeconds,
trigger,
+ usagePolicyId,
webhookNotifications);
}
@@ -505,6 +524,7 @@ public String toString() {
.add("tasks", tasks)
.add("timeoutSeconds", timeoutSeconds)
.add("trigger", trigger)
+ .add("usagePolicyId", usagePolicyId)
.add("webhookNotifications", webhookNotifications)
.toString();
}
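
A sketch of the new field at job-creation time; the `w.jobs().create(...)` call matches the SDK's usual service wiring, and the job name and policy ID are hypothetical.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.jobs.CreateJob;
import com.databricks.sdk.service.jobs.CreateResponse;

public class CreateJobWithUsagePolicy {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    CreateJob createJob =
        new CreateJob()
            .setName("nightly-etl") // hypothetical job name
            .setUsagePolicyId("policy-1234"); // hypothetical usage policy ID
    CreateResponse response = w.jobs().create(createJob);
    System.out.println(response.getJobId());
  }
}
```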
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
index 4de5a9e4a..6c3b6dca5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
@@ -183,6 +183,14 @@ public class JobSettings {
@JsonProperty("trigger")
private TriggerSettings trigger;
+ /**
+   * The ID of the user-specified usage policy to use for this job. If not specified, a default
+ * usage policy may be applied when creating or modifying the job. See
+ * `effective_budget_policy_id` for the budget policy used by this workload.
+ */
+ @JsonProperty("usage_policy_id")
+ private String usagePolicyId;
+
/** A collection of system notification IDs to notify when runs of this job begin or complete. */
@JsonProperty("webhook_notifications")
private WebhookNotifications webhookNotifications;
@@ -394,6 +402,15 @@ public TriggerSettings getTrigger() {
return trigger;
}
+ public JobSettings setUsagePolicyId(String usagePolicyId) {
+ this.usagePolicyId = usagePolicyId;
+ return this;
+ }
+
+ public String getUsagePolicyId() {
+ return usagePolicyId;
+ }
+
public JobSettings setWebhookNotifications(WebhookNotifications webhookNotifications) {
this.webhookNotifications = webhookNotifications;
return this;
@@ -431,6 +448,7 @@ public boolean equals(Object o) {
&& Objects.equals(tasks, that.tasks)
&& Objects.equals(timeoutSeconds, that.timeoutSeconds)
&& Objects.equals(trigger, that.trigger)
+ && Objects.equals(usagePolicyId, that.usagePolicyId)
&& Objects.equals(webhookNotifications, that.webhookNotifications);
}
@@ -460,6 +478,7 @@ public int hashCode() {
tasks,
timeoutSeconds,
trigger,
+ usagePolicyId,
webhookNotifications);
}
@@ -489,6 +508,7 @@ public String toString() {
.add("tasks", tasks)
.add("timeoutSeconds", timeoutSeconds)
.add("trigger", trigger)
+ .add("usagePolicyId", usagePolicyId)
.add("webhookNotifications", webhookNotifications)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java
index 58c71b012..6befcb9c4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java
@@ -98,6 +98,13 @@ public class SubmitRun {
@JsonProperty("timeout_seconds")
private Long timeoutSeconds;
+ /**
+   * The user-specified ID of the usage policy to use for this one-time run. If not specified, a
+ * default usage policy may be applied when creating or modifying the job.
+ */
+ @JsonProperty("usage_policy_id")
+ private String usagePolicyId;
+
/** A collection of system notification IDs to notify when the run begins or completes. */
@JsonProperty("webhook_notifications")
private WebhookNotifications webhookNotifications;
@@ -219,6 +226,15 @@ public Long getTimeoutSeconds() {
return timeoutSeconds;
}
+ public SubmitRun setUsagePolicyId(String usagePolicyId) {
+ this.usagePolicyId = usagePolicyId;
+ return this;
+ }
+
+ public String getUsagePolicyId() {
+ return usagePolicyId;
+ }
+
public SubmitRun setWebhookNotifications(WebhookNotifications webhookNotifications) {
this.webhookNotifications = webhookNotifications;
return this;
@@ -246,6 +262,7 @@ public boolean equals(Object o) {
&& Objects.equals(runName, that.runName)
&& Objects.equals(tasks, that.tasks)
&& Objects.equals(timeoutSeconds, that.timeoutSeconds)
+ && Objects.equals(usagePolicyId, that.usagePolicyId)
&& Objects.equals(webhookNotifications, that.webhookNotifications);
}
@@ -265,6 +282,7 @@ public int hashCode() {
runName,
tasks,
timeoutSeconds,
+ usagePolicyId,
webhookNotifications);
}
@@ -284,6 +302,7 @@ public String toString() {
.add("runName", runName)
.add("tasks", tasks)
.add("timeoutSeconds", timeoutSeconds)
+ .add("usagePolicyId", usagePolicyId)
.add("webhookNotifications", webhookNotifications)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java
index 7d719bb94..c585a4eda 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java
@@ -38,7 +38,9 @@
* invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run
* failed due to a cloud provider issue. Refer to the state message for further details. *
* `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size
- * limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user.
+ * limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user. *
+ * `BREAKING_CHANGE`: The run failed because of an intentional breaking change in Spark, but it
+ * will be retried with a mitigation config.
*
 * <p>[Link]:
* https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java
index 16874b850..dad437647 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java
@@ -17,11 +17,7 @@ public class PublishSpec {
@JsonProperty("online_table_name")
private String onlineTableName;
- /**
- * The publish mode of the pipeline that syncs the online table with the source table. Defaults to
- * TRIGGERED if not specified. All publish modes require the source table to have Change Data Feed
- * (CDF) enabled.
- */
+ /** The publish mode of the pipeline that syncs the online table with the source table. */
@JsonProperty("publish_mode")
private PublishSpecPublishMode publishMode;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpecPublishMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpecPublishMode.java
index 627ba03d2..47dc18d9c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpecPublishMode.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpecPublishMode.java
@@ -7,5 +7,6 @@
@Generated
public enum PublishSpecPublishMode {
CONTINUOUS,
+ SNAPSHOT,
TRIGGERED,
}
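
With `SNAPSHOT` added to the enum, a publish spec can select a snapshot-based sync, which plausibly explains dropping the blanket CDF requirement from the `PublishSpec` docs above. A sketch, with a hypothetical table name and setters following the generated pattern.

```java
import com.databricks.sdk.service.ml.PublishSpec;
import com.databricks.sdk.service.ml.PublishSpecPublishMode;

public class SnapshotPublishExample {
  public static void main(String[] args) {
    PublishSpec spec =
        new PublishSpec()
            .setOnlineTableName("main.features.user_features_online") // hypothetical
            .setPublishMode(PublishSpecPublishMode.SNAPSHOT);
    System.out.println(spec);
  }
}
```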
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInput.java
index 0541ffc62..0cfdeb698 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInput.java
@@ -12,6 +12,13 @@
@Generated
public class QueryEndpointInput {
+ /**
+ * Optional user-provided request identifier that will be recorded in the inference table and the
+ * usage tracking table.
+ */
+ @JsonProperty("client_request_id")
+ private String clientRequestId;
+
/** Pandas Dataframe input in the records orientation. */
@JsonProperty("dataframe_records")
private Collection