diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 3b0b1fdac..864d90a5f 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -d4c86c045ee9d0410a41ef07e8ae708673b95fa1 \ No newline at end of file +6b2dbf5489ec706709fed80ee65caed7d10a2f38 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index dc8748297..c27c40bc7 100755 --- a/.gitattributes +++ b/.gitattributes @@ -197,6 +197,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousU databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java linguist-generated=true @@ -212,6 +214,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegis databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java linguist-generated=true @@ -221,8 +224,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialV databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CurrentWorkspaceBindings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java linguist-generated=true @@ -235,6 +243,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatal databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java linguist-generated=true @@ -245,6 +257,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegis databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java linguist-generated=true @@ -268,6 +282,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLoc databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java linguist-generated=true @@ -293,11 +308,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountM databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java linguist-generated=true @@ -314,8 +331,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshR databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java linguist-generated=true @@ -330,6 +349,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnect databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java linguist-generated=true @@ -384,6 +405,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefr databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicy.java linguist-generated=true @@ -432,8 +454,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCred databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfoState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java linguist-generated=true @@ -460,10 +484,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUp databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java 
linguist-generated=true @@ -480,6 +505,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTable databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java linguist-generated=true @@ -497,7 +523,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBi databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoom.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAccessRestricted.java linguist-generated=true @@ -742,7 +767,6 @@ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStat databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibrary.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteCluster.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponse.java linguist-generated=true @@ -1019,7 +1043,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionOutpu databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescription.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RequestAuthzIdentity.java linguist-generated=true @@ -1033,6 +1056,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipa databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsAPI.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateRuleSetRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java linguist-generated=true @@ -1682,6 +1707,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdate databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesRequest.java linguist-generated=true @@ -1693,6 +1719,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/MaturityL 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibrary.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Notifications.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineCluster.java linguist-generated=true @@ -1840,6 +1867,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRe databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessageRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java linguist-generated=true @@ -1882,6 +1910,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfi databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PaLmConfig.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PatchServingEndpointTags.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java linguist-generated=true @@ -1921,6 +1951,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndp databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingModelWorkloadType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/V1ResponseChoiceElement.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java linguist-generated=true @@ -1998,6 +2029,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisa databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequest.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNotificationDestinationRequest.java linguist-generated=true @@ -2081,6 +2114,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEsmEnab databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPersonalComputeSettingRequest.java linguist-generated=true @@ -2106,6 +2142,18 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublic databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceService.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/MicrosoftTeamsConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java linguist-generated=true @@ -2178,6 +2226,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnab databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 5c6bef74c..98c34d24c 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -12,3 +12,40 @@ * Capture DatabricksError when retrying API calls ([#427](https://github.com/databricks/databricks-sdk-java/pull/427)). ### API Changes +* Added `accountClient.llmProxyPartnerPoweredAccount()` service, `accountClient.llmProxyPartnerPoweredEnforce()` service and `workspaceClient.llmProxyPartnerPoweredWorkspace()` service. +* Added `workspaceClient.databaseInstances()` service. +* Added `createProvisionedThroughputEndpoint()` and `updateProvisionedThroughputEndpointConfig()` methods for `workspaceClient.servingEndpoints()` service. +* Added `catalogName` field for `com.databricks.sdk.service.catalog.EnableRequest`. +* Added `sourceType` field for `com.databricks.sdk.service.pipelines.IngestionPipelineDefinition`. +* Added `glob` field for `com.databricks.sdk.service.pipelines.PipelineLibrary`. +* Added `provisionedModelUnits` field for `com.databricks.sdk.service.serving.ServedEntityInput`. +* Added `provisionedModelUnits` field for `com.databricks.sdk.service.serving.ServedEntityOutput`. +* Added `provisionedModelUnits` field for `com.databricks.sdk.service.serving.ServedModelInput`. +* Added `provisionedModelUnits` field for `com.databricks.sdk.service.serving.ServedModelOutput`. +* Added `DESCRIBE_QUERY_INVALID_SQL_ERROR`, `DESCRIBE_QUERY_TIMEOUT`, `DESCRIBE_QUERY_UNEXPECTED_FAILURE`, `INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION`, `INVALID_SQL_MULTIPLE_DATASET_REFERENCES_EXCEPTION`, `INVALID_SQL_MULTIPLE_STATEMENTS_EXCEPTION` and `INVALID_SQL_UNKNOWN_TABLE_EXCEPTION` enum values for `com.databricks.sdk.service.dashboards.MessageErrorType`. 
+* Added `CAN_CREATE` and `CAN_MONITOR_ONLY` enum values for `com.databricks.sdk.service.iam.PermissionLevel`. +* Added `SUCCESS_WITH_FAILURES` enum value for `com.databricks.sdk.service.jobs.TerminationCodeCode`. +* Added `INFRASTRUCTURE_MAINTENANCE` enum value for `com.databricks.sdk.service.pipelines.StartUpdateCause`. +* Added `INFRASTRUCTURE_MAINTENANCE` enum value for `com.databricks.sdk.service.pipelines.UpdateInfoCause`. +* [Breaking] Changed `createAlert()` and `updateAlert()` methods for `workspaceClient.alertsV2()` service with new required argument order. +* [Breaking] Changed `set()` method for `workspaceClient.permissions()` service. New request type is `com.databricks.sdk.service.iam.SetObjectPermissions` class. +* [Breaking] Changed `update()` method for `workspaceClient.permissions()` service. New request type is `com.databricks.sdk.service.iam.UpdateObjectPermissions` class. +* [Breaking] Changed `get()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.GetCatalogWorkspaceBindingsResponse` class. +* [Breaking] Changed `getBindings()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.GetWorkspaceBindingsResponse` class. +* [Breaking] Changed `update()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.UpdateCatalogWorkspaceBindingsResponse` class. +* [Breaking] Changed `updateBindings()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.UpdateWorkspaceBindingsResponse` class. +* [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.GetBindingsRequest` to type `String` class. +* Changed `schema` and `state` fields for `com.databricks.sdk.service.catalog.SystemSchemaInfo` to be required. +* [Breaking] Changed `state` field for `com.databricks.sdk.service.catalog.SystemSchemaInfo` to type `String` class. 
+* [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.UpdateWorkspaceBindingsParameters` to type `String` class. +* [Breaking] Changed `workspaceId` field for `com.databricks.sdk.service.catalog.WorkspaceBinding` to be required. +* [Breaking] Changed `gpuNodePoolId` field for `com.databricks.sdk.service.jobs.ComputeConfig` to no longer be required. +* Changed `gpuNodePoolId` field for `com.databricks.sdk.service.jobs.ComputeConfig` to no longer be required. +* [Breaking] Changed `alert` field for `com.databricks.sdk.service.sql.CreateAlertV2Request` to be required. +* [Breaking] Changed `alert` field for `com.databricks.sdk.service.sql.UpdateAlertV2Request` to be required. +* [Breaking] Removed `nodeTypeFlexibility` field for `com.databricks.sdk.service.compute.EditInstancePool`. +* [Breaking] Removed `nodeTypeFlexibility` field for `com.databricks.sdk.service.compute.GetInstancePool`. +* [Breaking] Removed `nodeTypeFlexibility` field for `com.databricks.sdk.service.compute.InstancePoolAndStats`. +* [Breaking] Removed `CATALOG`, `CREDENTIAL`, `EXTERNAL_LOCATION` and `STORAGE_CREDENTIAL` enum values for `com.databricks.sdk.service.catalog.GetBindingsSecurableType`. +* [Breaking] Removed `AVAILABLE`, `DISABLE_INITIALIZED`, `ENABLE_COMPLETED`, `ENABLE_INITIALIZED` and `UNAVAILABLE` enum values for `com.databricks.sdk.service.catalog.SystemSchemaInfoState`. +* [Breaking] Removed `CATALOG`, `CREDENTIAL`, `EXTERNAL_LOCATION` and `STORAGE_CREDENTIAL` enum values for `com.databricks.sdk.service.catalog.UpdateBindingsSecurableType`. 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 8e3a89c79..cc1c270b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -18,6 +18,8 @@ import com.databricks.sdk.service.catalog.ConnectionsService; import com.databricks.sdk.service.catalog.CredentialsAPI; import com.databricks.sdk.service.catalog.CredentialsService; +import com.databricks.sdk.service.catalog.DatabaseInstancesAPI; +import com.databricks.sdk.service.catalog.DatabaseInstancesService; import com.databricks.sdk.service.catalog.ExternalLocationsAPI; import com.databricks.sdk.service.catalog.ExternalLocationsService; import com.databricks.sdk.service.catalog.FunctionsAPI; @@ -237,6 +239,7 @@ public class WorkspaceClient { private DashboardWidgetsAPI dashboardWidgetsAPI; private DashboardsAPI dashboardsAPI; private DataSourcesAPI dataSourcesAPI; + private DatabaseInstancesAPI databaseInstancesAPI; private DbfsExt dbfsAPI; private DbsqlPermissionsAPI dbsqlPermissionsAPI; private ExperimentsAPI experimentsAPI; @@ -345,6 +348,7 @@ public WorkspaceClient(DatabricksConfig config) { dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient); dashboardsAPI = new DashboardsAPI(apiClient); dataSourcesAPI = new DataSourcesAPI(apiClient); + databaseInstancesAPI = new DatabaseInstancesAPI(apiClient); dbfsAPI = new DbfsExt(apiClient); dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient); experimentsAPI = new ExperimentsAPI(apiClient); @@ -701,6 +705,11 @@ public DataSourcesAPI dataSources() { return dataSourcesAPI; } + /** Database Instances provide access to a database via REST API or direct SQL. 
*/ + public DatabaseInstancesAPI databaseInstances() { + return databaseInstancesAPI; + } + /** * DBFS API makes it simple to interact with various data sources without having to include a * users credentials every time to read a file. @@ -1044,52 +1053,27 @@ public PermissionMigrationAPI permissionMigration() { /** * Permissions API are used to create read, write, edit, update and manage access for various - * users on different objects and endpoints. - * - *
* **[Apps permissions](:service:apps)** — Manage which users can manage or use apps. - * - *
* **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or - * attach to clusters. - * - *
* **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users can use - * cluster policies. - * - *
* **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can - * view, manage, run, cancel, or own a Delta Live Tables pipeline. - * - *
* **[Job permissions](:service:jobs)** — Manage which users can view, manage, trigger, - * cancel, or own a job. - * - *
* **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, - * edit, or manage MLflow experiments. - * - *
* **[MLflow registered model permissions](:service:modelregistry)** — Manage which users can - * read, edit, or manage MLflow registered models. - * - *
* **[Password permissions](:service:users)** — Manage which users can use password login - * when SSO is enabled. - * - *
* **[Instance Pool permissions](:service:instancepools)** — Manage which users can manage or - * attach to pools. - * - *
* **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo. - * - *
* **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can - * view, query, or manage a serving endpoint. - * - *
* **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or - * manage SQL warehouses. - * - *
* **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use - * tokens. - * - *
* **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, - * edit, or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. - * - *
For the mapping of the required permissions for specific actions or abilities and other - * important information, see [Access Control]. - * - *
Note that to manage access control on service principals, use **[Account Access Control + * users on different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage + * which users can manage or use apps. * **[Cluster permissions](:service:clusters)** — Manage + * which users can manage, restart, or attach to clusters. * **[Cluster policy + * permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. * + * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view, + * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job + * permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a + * job. * **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, + * edit, or manage MLflow experiments. * **[MLflow registered model + * permissions](:service:modelregistry)** — Manage which users can read, edit, or manage MLflow + * registered models. * **[Instance Pool permissions](:service:instancepools)** — Manage which + * users can manage or attach to pools. * **[Repo permissions](repos)** — Manage which users can + * read, run, edit, or manage a repo. * **[Serving endpoint + * permissions](:service:servingendpoints)** — Manage which users can view, query, or manage a + * serving endpoint. * **[SQL warehouse permissions](:service:warehouses)** — Manage which users + * can use or manage SQL warehouses. * **[Token permissions](:service:tokenmanagement)** — Manage + * which users can create or use tokens. * **[Workspace object permissions](:service:workspace)** + * — Manage which users can read, run, edit, or manage alerts, dbsql-dashboards, directories, + * files, notebooks and queries. For the mapping of the required permissions for specific actions + * or abilities and other important information, see [Access Control]. 
Note that to manage access + * control on service principals, use **[Account Access Control * Proxy](:service:accountaccesscontrolproxy)**. * *
[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html @@ -1729,7 +1713,8 @@ public WorkspaceAPI workspace() { * Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) * which introduces the ability to bind a securable in READ_ONLY mode (catalogs only). * - *
Securable types that support binding: - catalog - storage_credential - external_location + *
Securable types that support binding: - catalog - storage_credential - credential -
+ * external_location
*/
public WorkspaceBindingsAPI workspaceBindings() {
return workspaceBindingsAPI;
@@ -2040,6 +2025,17 @@ public WorkspaceClient withDataSourcesAPI(DataSourcesAPI dataSources) {
return this;
}
+ /** Replace the default DatabaseInstancesService with a custom implementation. */
+ public WorkspaceClient withDatabaseInstancesImpl(DatabaseInstancesService databaseInstances) {
+ return this.withDatabaseInstancesAPI(new DatabaseInstancesAPI(databaseInstances));
+ }
+
+ /** Replace the default DatabaseInstancesAPI with a custom implementation. */
+ public WorkspaceClient withDatabaseInstancesAPI(DatabaseInstancesAPI databaseInstances) {
+ this.databaseInstancesAPI = databaseInstances;
+ return this;
+ }
+
/** Replace the default DbfsService with a custom implementation. */
public WorkspaceClient withDbfsImpl(DbfsService dbfs) {
return this.withDbfsAPI(new DbfsExt(dbfs));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java
new file mode 100755
index 000000000..d58ef38df
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Create a Database Catalog */
+@Generated
+public class CreateDatabaseCatalogRequest {
+ /** */
+ @JsonProperty("catalog")
+ private DatabaseCatalog catalog;
+
+ public CreateDatabaseCatalogRequest setCatalog(DatabaseCatalog catalog) {
+ this.catalog = catalog;
+ return this;
+ }
+
+ public DatabaseCatalog getCatalog() {
+ return catalog;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateDatabaseCatalogRequest that = (CreateDatabaseCatalogRequest) o;
+ return Objects.equals(catalog, that.catalog);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(catalog);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateDatabaseCatalogRequest.class).add("catalog", catalog).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java
new file mode 100755
index 000000000..2aa9d2a71
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Create a Database Instance */
+@Generated
+public class CreateDatabaseInstanceRequest {
+ /**
+ * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and
+ * storage.
+ */
+ @JsonProperty("database_instance")
+ private DatabaseInstance databaseInstance;
+
+ public CreateDatabaseInstanceRequest setDatabaseInstance(DatabaseInstance databaseInstance) {
+ this.databaseInstance = databaseInstance;
+ return this;
+ }
+
+ public DatabaseInstance getDatabaseInstance() {
+ return databaseInstance;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateDatabaseInstanceRequest that = (CreateDatabaseInstanceRequest) o;
+ return Objects.equals(databaseInstance, that.databaseInstance);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseInstance);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateDatabaseInstanceRequest.class)
+ .add("databaseInstance", databaseInstance)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java
new file mode 100755
index 000000000..2c1ea4700
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Create a Synced Database Table */
+@Generated
+public class CreateSyncedDatabaseTableRequest {
+ /** Next field marker: 10 */
+ @JsonProperty("synced_table")
+ private SyncedDatabaseTable syncedTable;
+
+ public CreateSyncedDatabaseTableRequest setSyncedTable(SyncedDatabaseTable syncedTable) {
+ this.syncedTable = syncedTable;
+ return this;
+ }
+
+ public SyncedDatabaseTable getSyncedTable() {
+ return syncedTable;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateSyncedDatabaseTableRequest that = (CreateSyncedDatabaseTableRequest) o;
+ return Objects.equals(syncedTable, that.syncedTable);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(syncedTable);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateSyncedDatabaseTableRequest.class)
+ .add("syncedTable", syncedTable)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java
new file mode 100755
index 000000000..b4d1c2d57
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java
@@ -0,0 +1,104 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class DatabaseCatalog {
+ /** */
+ @JsonProperty("create_database_if_not_exists")
+ private Boolean createDatabaseIfNotExists;
+
+ /** The name of the DatabaseInstance housing the database. */
+ @JsonProperty("database_instance_name")
+ private String databaseInstanceName;
+
+ /** The name of the database (in a instance) associated with the catalog. */
+ @JsonProperty("database_name")
+ private String databaseName;
+
+ /** The name of the catalog in UC. */
+ @JsonProperty("name")
+ private String name;
+
+ /** */
+ @JsonProperty("uid")
+ private String uid;
+
+ public DatabaseCatalog setCreateDatabaseIfNotExists(Boolean createDatabaseIfNotExists) {
+ this.createDatabaseIfNotExists = createDatabaseIfNotExists;
+ return this;
+ }
+
+ public Boolean getCreateDatabaseIfNotExists() {
+ return createDatabaseIfNotExists;
+ }
+
+ public DatabaseCatalog setDatabaseInstanceName(String databaseInstanceName) {
+ this.databaseInstanceName = databaseInstanceName;
+ return this;
+ }
+
+ public String getDatabaseInstanceName() {
+ return databaseInstanceName;
+ }
+
+ public DatabaseCatalog setDatabaseName(String databaseName) {
+ this.databaseName = databaseName;
+ return this;
+ }
+
+ public String getDatabaseName() {
+ return databaseName;
+ }
+
+ public DatabaseCatalog setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public DatabaseCatalog setUid(String uid) {
+ this.uid = uid;
+ return this;
+ }
+
+ public String getUid() {
+ return uid;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseCatalog that = (DatabaseCatalog) o;
+ return Objects.equals(createDatabaseIfNotExists, that.createDatabaseIfNotExists)
+ && Objects.equals(databaseInstanceName, that.databaseInstanceName)
+ && Objects.equals(databaseName, that.databaseName)
+ && Objects.equals(name, that.name)
+ && Objects.equals(uid, that.uid);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(createDatabaseIfNotExists, databaseInstanceName, databaseName, name, uid);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseCatalog.class)
+ .add("createDatabaseIfNotExists", createDatabaseIfNotExists)
+ .add("databaseInstanceName", databaseInstanceName)
+ .add("databaseName", databaseName)
+ .add("name", name)
+ .add("uid", uid)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java
new file mode 100755
index 000000000..ce72b3cba
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java
@@ -0,0 +1,208 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.
+ */
+@Generated
+public class DatabaseInstance {
+ /** Password for admin user to create. If not provided, no user will be created. */
+ @JsonProperty("admin_password")
+ private String adminPassword;
+
+ /** Name of the admin role for the instance. If not provided, defaults to 'databricks_admin'. */
+ @JsonProperty("admin_rolename")
+ private String adminRolename;
+
+ /** The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4". */
+ @JsonProperty("capacity")
+ private String capacity;
+
+ /** The timestamp when the instance was created. */
+ @JsonProperty("creation_time")
+ private String creationTime;
+
+ /** The email of the creator of the instance. */
+ @JsonProperty("creator")
+ private String creator;
+
+ /** The name of the instance. This is the unique identifier for the instance. */
+ @JsonProperty("name")
+ private String name;
+
+ /** The version of Postgres running on the instance. */
+ @JsonProperty("pg_version")
+ private String pgVersion;
+
+ /** The DNS endpoint to connect to the instance for read+write access. */
+ @JsonProperty("read_write_dns")
+ private String readWriteDns;
+
+ /** The current state of the instance. */
+ @JsonProperty("state")
+ private DatabaseInstanceState state;
+
+ /** Whether the instance is stopped. */
+ @JsonProperty("stopped")
+ private Boolean stopped;
+
+ /** An immutable UUID identifier for the instance. */
+ @JsonProperty("uid")
+ private String uid;
+
+ public DatabaseInstance setAdminPassword(String adminPassword) {
+ this.adminPassword = adminPassword;
+ return this;
+ }
+
+ public String getAdminPassword() {
+ return adminPassword;
+ }
+
+ public DatabaseInstance setAdminRolename(String adminRolename) {
+ this.adminRolename = adminRolename;
+ return this;
+ }
+
+ public String getAdminRolename() {
+ return adminRolename;
+ }
+
+ public DatabaseInstance setCapacity(String capacity) {
+ this.capacity = capacity;
+ return this;
+ }
+
+ public String getCapacity() {
+ return capacity;
+ }
+
+ public DatabaseInstance setCreationTime(String creationTime) {
+ this.creationTime = creationTime;
+ return this;
+ }
+
+ public String getCreationTime() {
+ return creationTime;
+ }
+
+ public DatabaseInstance setCreator(String creator) {
+ this.creator = creator;
+ return this;
+ }
+
+ public String getCreator() {
+ return creator;
+ }
+
+ public DatabaseInstance setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public DatabaseInstance setPgVersion(String pgVersion) {
+ this.pgVersion = pgVersion;
+ return this;
+ }
+
+ public String getPgVersion() {
+ return pgVersion;
+ }
+
+ public DatabaseInstance setReadWriteDns(String readWriteDns) {
+ this.readWriteDns = readWriteDns;
+ return this;
+ }
+
+ public String getReadWriteDns() {
+ return readWriteDns;
+ }
+
+ public DatabaseInstance setState(DatabaseInstanceState state) {
+ this.state = state;
+ return this;
+ }
+
+ public DatabaseInstanceState getState() {
+ return state;
+ }
+
+ public DatabaseInstance setStopped(Boolean stopped) {
+ this.stopped = stopped;
+ return this;
+ }
+
+ public Boolean getStopped() {
+ return stopped;
+ }
+
+ public DatabaseInstance setUid(String uid) {
+ this.uid = uid;
+ return this;
+ }
+
+ public String getUid() {
+ return uid;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseInstance that = (DatabaseInstance) o;
+ return Objects.equals(adminPassword, that.adminPassword)
+ && Objects.equals(adminRolename, that.adminRolename)
+ && Objects.equals(capacity, that.capacity)
+ && Objects.equals(creationTime, that.creationTime)
+ && Objects.equals(creator, that.creator)
+ && Objects.equals(name, that.name)
+ && Objects.equals(pgVersion, that.pgVersion)
+ && Objects.equals(readWriteDns, that.readWriteDns)
+ && Objects.equals(state, that.state)
+ && Objects.equals(stopped, that.stopped)
+ && Objects.equals(uid, that.uid);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ adminPassword,
+ adminRolename,
+ capacity,
+ creationTime,
+ creator,
+ name,
+ pgVersion,
+ readWriteDns,
+ state,
+ stopped,
+ uid);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseInstance.class)
+ .add("adminPassword", adminPassword)
+ .add("adminRolename", adminRolename)
+ .add("capacity", capacity)
+ .add("creationTime", creationTime)
+ .add("creator", creator)
+ .add("name", name)
+ .add("pgVersion", pgVersion)
+ .add("readWriteDns", readWriteDns)
+ .add("state", state)
+ .add("stopped", stopped)
+ .add("uid", uid)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java
new file mode 100755
index 000000000..909921d03
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum DatabaseInstanceState {
+ AVAILABLE,
+ DELETING,
+ FAILING_OVER,
+ STARTING,
+ STOPPED,
+ UPDATING,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java
new file mode 100755
index 000000000..c91d638d3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java
@@ -0,0 +1,147 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Database Instances provide access to a database via REST API or direct SQL. */
+@Generated
+public class DatabaseInstancesAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(DatabaseInstancesAPI.class);
+
+ private final DatabaseInstancesService impl;
+
+ /** Regular-use constructor */
+ public DatabaseInstancesAPI(ApiClient apiClient) {
+ impl = new DatabaseInstancesImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public DatabaseInstancesAPI(DatabaseInstancesService mock) {
+ impl = mock;
+ }
+
+ public DatabaseCatalog createDatabaseCatalog(DatabaseCatalog catalog) {
+ return createDatabaseCatalog(new CreateDatabaseCatalogRequest().setCatalog(catalog));
+ }
+
+ /** Create a Database Catalog. */
+ public DatabaseCatalog createDatabaseCatalog(CreateDatabaseCatalogRequest request) {
+ return impl.createDatabaseCatalog(request);
+ }
+
+ public DatabaseInstance createDatabaseInstance(DatabaseInstance databaseInstance) {
+ return createDatabaseInstance(
+ new CreateDatabaseInstanceRequest().setDatabaseInstance(databaseInstance));
+ }
+
+ /** Create a Database Instance. */
+ public DatabaseInstance createDatabaseInstance(CreateDatabaseInstanceRequest request) {
+ return impl.createDatabaseInstance(request);
+ }
+
+ public SyncedDatabaseTable createSyncedDatabaseTable(SyncedDatabaseTable syncedTable) {
+ return createSyncedDatabaseTable(
+ new CreateSyncedDatabaseTableRequest().setSyncedTable(syncedTable));
+ }
+
+ /** Create a Synced Database Table. */
+ public SyncedDatabaseTable createSyncedDatabaseTable(CreateSyncedDatabaseTableRequest request) {
+ return impl.createSyncedDatabaseTable(request);
+ }
+
+ public void deleteDatabaseCatalog(String name) {
+ deleteDatabaseCatalog(new DeleteDatabaseCatalogRequest().setName(name));
+ }
+
+ /** Delete a Database Catalog. */
+ public void deleteDatabaseCatalog(DeleteDatabaseCatalogRequest request) {
+ impl.deleteDatabaseCatalog(request);
+ }
+
+ public void deleteDatabaseInstance(String name) {
+ deleteDatabaseInstance(new DeleteDatabaseInstanceRequest().setName(name));
+ }
+
+ /** Delete a Database Instance. */
+ public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) {
+ impl.deleteDatabaseInstance(request);
+ }
+
+ public void deleteSyncedDatabaseTable(String name) {
+ deleteSyncedDatabaseTable(new DeleteSyncedDatabaseTableRequest().setName(name));
+ }
+
+ /** Delete a Synced Database Table. */
+ public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request) {
+ impl.deleteSyncedDatabaseTable(request);
+ }
+
+ /** Find a Database Instance by uid. */
+ public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidRequest request) {
+ return impl.findDatabaseInstanceByUid(request);
+ }
+
+ public DatabaseCatalog getDatabaseCatalog(String name) {
+ return getDatabaseCatalog(new GetDatabaseCatalogRequest().setName(name));
+ }
+
+ /** Get a Database Catalog. */
+ public DatabaseCatalog getDatabaseCatalog(GetDatabaseCatalogRequest request) {
+ return impl.getDatabaseCatalog(request);
+ }
+
+ public DatabaseInstance getDatabaseInstance(String name) {
+ return getDatabaseInstance(new GetDatabaseInstanceRequest().setName(name));
+ }
+
+ /** Get a Database Instance. */
+ public DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest request) {
+ return impl.getDatabaseInstance(request);
+ }
+
+ public SyncedDatabaseTable getSyncedDatabaseTable(String name) {
+ return getSyncedDatabaseTable(new GetSyncedDatabaseTableRequest().setName(name));
+ }
+
+ /** Get a Synced Database Table. */
+ public SyncedDatabaseTable getSyncedDatabaseTable(GetSyncedDatabaseTableRequest request) {
+ return impl.getSyncedDatabaseTable(request);
+ }
+
+ /** List Database Instances. */
+ public Iterable This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface DatabaseInstancesService {
+ /** Create a Database Catalog. */
+ DatabaseCatalog createDatabaseCatalog(CreateDatabaseCatalogRequest createDatabaseCatalogRequest);
+
+ /** Create a Database Instance. */
+ DatabaseInstance createDatabaseInstance(
+ CreateDatabaseInstanceRequest createDatabaseInstanceRequest);
+
+ /** Create a Synced Database Table. */
+ SyncedDatabaseTable createSyncedDatabaseTable(
+ CreateSyncedDatabaseTableRequest createSyncedDatabaseTableRequest);
+
+ /** Delete a Database Catalog. */
+ void deleteDatabaseCatalog(DeleteDatabaseCatalogRequest deleteDatabaseCatalogRequest);
+
+ /** Delete a Database Instance. */
+ void deleteDatabaseInstance(DeleteDatabaseInstanceRequest deleteDatabaseInstanceRequest);
+
+ /** Delete a Synced Database Table. */
+ void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest deleteSyncedDatabaseTableRequest);
+
+ /** Find a Database Instance by uid. */
+ DatabaseInstance findDatabaseInstanceByUid(
+ FindDatabaseInstanceByUidRequest findDatabaseInstanceByUidRequest);
+
+ /** Get a Database Catalog. */
+ DatabaseCatalog getDatabaseCatalog(GetDatabaseCatalogRequest getDatabaseCatalogRequest);
+
+ /** Get a Database Instance. */
+ DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest getDatabaseInstanceRequest);
+
+ /** Get a Synced Database Table. */
+ SyncedDatabaseTable getSyncedDatabaseTable(
+ GetSyncedDatabaseTableRequest getSyncedDatabaseTableRequest);
+
+ /** List Database Instances. */
+ ListDatabaseInstancesResponse listDatabaseInstances(
+ ListDatabaseInstancesRequest listDatabaseInstancesRequest);
+
+ /** Update a Database Instance. */
+ DatabaseInstance updateDatabaseInstance(
+ UpdateDatabaseInstanceRequest updateDatabaseInstanceRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java
new file mode 100755
index 000000000..3a455fea8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete a Database Catalog */
+@Generated
+public class DeleteDatabaseCatalogRequest {
+ /** */
+ @JsonIgnore private String name;
+
+ public DeleteDatabaseCatalogRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDatabaseCatalogRequest that = (DeleteDatabaseCatalogRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDatabaseCatalogRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java
similarity index 56%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java
index 7366ed43d..17de1764a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java
@@ -1,18 +1,13 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.compute;
+package com.databricks.sdk.service.catalog;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
-/**
- * For Fleet-V2 using classic clusters, this object contains the information about the alternate
- * node type ids to use when attempting to launch a cluster. It can be used with both the driver and
- * worker node types.
- */
@Generated
-public class NodeTypeFlexibility {
+public class DeleteDatabaseCatalogResponse {
@Override
public boolean equals(Object o) {
@@ -28,6 +23,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(NodeTypeFlexibility.class).toString();
+ return new ToStringer(DeleteDatabaseCatalogResponse.class).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java
new file mode 100755
index 000000000..e043e1347
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java
@@ -0,0 +1,85 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete a Database Instance */
+@Generated
+public class DeleteDatabaseInstanceRequest {
+ /**
+ * By default, a instance cannot be deleted if it has descendant instances created via PITR. If
+ * this flag is specified as true, all descendent instances will be deleted as well.
+ */
+ @JsonIgnore
+ @QueryParam("force")
+ private Boolean force;
+
+ /** Name of the instance to delete. */
+ @JsonIgnore private String name;
+
+ /**
+ * If false, the database instance is soft deleted. Soft deleted instances behave as if they are
+ * deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted
+ * by calling the undelete API for a limited time. If true, the database instance is hard deleted
+ * and cannot be undeleted.
+ */
+ @JsonIgnore
+ @QueryParam("purge")
+ private Boolean purge;
+
+ public DeleteDatabaseInstanceRequest setForce(Boolean force) {
+ this.force = force;
+ return this;
+ }
+
+ public Boolean getForce() {
+ return force;
+ }
+
+ public DeleteDatabaseInstanceRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public DeleteDatabaseInstanceRequest setPurge(Boolean purge) {
+ this.purge = purge;
+ return this;
+ }
+
+ public Boolean getPurge() {
+ return purge;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDatabaseInstanceRequest that = (DeleteDatabaseInstanceRequest) o;
+ return Objects.equals(force, that.force)
+ && Objects.equals(name, that.name)
+ && Objects.equals(purge, that.purge);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(force, name, purge);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDatabaseInstanceRequest.class)
+ .add("force", force)
+ .add("name", name)
+ .add("purge", purge)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java
new file mode 100755
index 000000000..4d96f2e05
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class DeleteDatabaseInstanceResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDatabaseInstanceResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java
new file mode 100755
index 000000000..506ab393b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete a Synced Database Table */
+@Generated
+public class DeleteSyncedDatabaseTableRequest {
+ /** */
+ @JsonIgnore private String name;
+
+ public DeleteSyncedDatabaseTableRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteSyncedDatabaseTableRequest that = (DeleteSyncedDatabaseTableRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteSyncedDatabaseTableRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java
new file mode 100755
index 000000000..147f31d48
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class DeleteSyncedDatabaseTableResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteSyncedDatabaseTableResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java
index 2974dbfa7..7a0a9a477 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java
@@ -5,17 +5,30 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Enable a system schema */
@Generated
public class EnableRequest {
+ /** the catalog for which the system schema is to enabled in */
+ @JsonProperty("catalog_name")
+ private String catalogName;
+
/** The metastore ID under which the system schema lives. */
@JsonIgnore private String metastoreId;
/** Full name of the system schema. */
@JsonIgnore private String schemaName;
+ public EnableRequest setCatalogName(String catalogName) {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ public String getCatalogName() {
+ return catalogName;
+ }
+
public EnableRequest setMetastoreId(String metastoreId) {
this.metastoreId = metastoreId;
return this;
@@ -39,18 +52,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
EnableRequest that = (EnableRequest) o;
- return Objects.equals(metastoreId, that.metastoreId)
+ return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(metastoreId, that.metastoreId)
&& Objects.equals(schemaName, that.schemaName);
}
@Override
public int hashCode() {
- return Objects.hash(metastoreId, schemaName);
+ return Objects.hash(catalogName, metastoreId, schemaName);
}
@Override
public String toString() {
return new ToStringer(EnableRequest.class)
+ .add("catalogName", catalogName)
.add("metastoreId", metastoreId)
.add("schemaName", schemaName)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java
new file mode 100755
index 000000000..894cb8153
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Find a Database Instance by uid */
+@Generated
+public class FindDatabaseInstanceByUidRequest {
+ /** UID of the cluster to get. */
+ @JsonIgnore
+ @QueryParam("uid")
+ private String uid;
+
+ public FindDatabaseInstanceByUidRequest setUid(String uid) {
+ this.uid = uid;
+ return this;
+ }
+
+ public String getUid() {
+ return uid;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ FindDatabaseInstanceByUidRequest that = (FindDatabaseInstanceByUidRequest) o;
+ return Objects.equals(uid, that.uid);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(uid);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(FindDatabaseInstanceByUidRequest.class).add("uid", uid).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java
index 5bdae1c43..f0fad5bb6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java
@@ -30,8 +30,11 @@ public class GetBindingsRequest {
/** The name of the securable. */
@JsonIgnore private String securableName;
- /** The type of the securable to bind to a workspace. */
- @JsonIgnore private GetBindingsSecurableType securableType;
+ /**
+ * The type of the securable to bind to a workspace (catalog, storage_credential, credential, or
+ * external_location).
+ */
+ @JsonIgnore private String securableType;
public GetBindingsRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
@@ -60,12 +63,12 @@ public String getSecurableName() {
return securableName;
}
- public GetBindingsRequest setSecurableType(GetBindingsSecurableType securableType) {
+ public GetBindingsRequest setSecurableType(String securableType) {
this.securableType = securableType;
return this;
}
- public GetBindingsSecurableType getSecurableType() {
+ public String getSecurableType() {
return securableType;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java
deleted file mode 100755
index d35c64de5..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java
+++ /dev/null
@@ -1,21 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.catalog;
-
-import com.databricks.sdk.support.Generated;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-@Generated
-public enum GetBindingsSecurableType {
- @JsonProperty("catalog")
- CATALOG,
-
- @JsonProperty("credential")
- CREDENTIAL,
-
- @JsonProperty("external_location")
- EXTERNAL_LOCATION,
-
- @JsonProperty("storage_credential")
- STORAGE_CREDENTIAL,
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CurrentWorkspaceBindings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java
similarity index 68%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CurrentWorkspaceBindings.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java
index 09bc30eda..b2aedb9a5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CurrentWorkspaceBindings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java
@@ -8,14 +8,13 @@
import java.util.Collection;
import java.util.Objects;
-/** Currently assigned workspaces */
@Generated
-public class CurrentWorkspaceBindings {
- /** A list of workspace IDs. */
+public class GetCatalogWorkspaceBindingsResponse {
+ /** A list of workspace IDs */
@JsonProperty("workspaces")
private Collection When creating a synced table in a registered Postgres catalog, the target Postgres database
+ * name is inferred to be that of the registered catalog. If this field is specified in this
+ * scenario, the Postgres database name MUST match that of the registered catalog (or the request
+ * will be rejected).
+ *
+ * When creating a synced table in a standard catalog, the target database name is inferred to
+ * be that of the standard catalog. In this scenario, specifying this field will allow targeting
+ * an arbitrary postgres database.
+ */
+ @JsonProperty("logical_database_name")
+ private String logicalDatabaseName;
+
+ /** Full three-part (catalog, schema, table) name of the table. */
+ @JsonProperty("name")
+ private String name;
+
+ /** Specification of a synced database table. */
+ @JsonProperty("spec")
+ private SyncedTableSpec spec;
+
+ /** Data serving REST API URL for this table */
+ @JsonProperty("table_serving_url")
+ private String tableServingUrl;
+
+ /**
+ * The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
+ * state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
+ * may be in "PROVISIONING" as it runs asynchronously).
+ */
+ @JsonProperty("unity_catalog_provisioning_state")
+ private ProvisioningInfoState unityCatalogProvisioningState;
+
+ public SyncedDatabaseTable setDataSynchronizationStatus(
+ OnlineTableStatus dataSynchronizationStatus) {
+ this.dataSynchronizationStatus = dataSynchronizationStatus;
+ return this;
+ }
+
+ public OnlineTableStatus getDataSynchronizationStatus() {
+ return dataSynchronizationStatus;
+ }
+
+ public SyncedDatabaseTable setDatabaseInstanceName(String databaseInstanceName) {
+ this.databaseInstanceName = databaseInstanceName;
+ return this;
+ }
+
+ public String getDatabaseInstanceName() {
+ return databaseInstanceName;
+ }
+
+ public SyncedDatabaseTable setLogicalDatabaseName(String logicalDatabaseName) {
+ this.logicalDatabaseName = logicalDatabaseName;
+ return this;
+ }
+
+ public String getLogicalDatabaseName() {
+ return logicalDatabaseName;
+ }
+
+ public SyncedDatabaseTable setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public SyncedDatabaseTable setSpec(SyncedTableSpec spec) {
+ this.spec = spec;
+ return this;
+ }
+
+ public SyncedTableSpec getSpec() {
+ return spec;
+ }
+
+ public SyncedDatabaseTable setTableServingUrl(String tableServingUrl) {
+ this.tableServingUrl = tableServingUrl;
+ return this;
+ }
+
+ public String getTableServingUrl() {
+ return tableServingUrl;
+ }
+
+ public SyncedDatabaseTable setUnityCatalogProvisioningState(
+ ProvisioningInfoState unityCatalogProvisioningState) {
+ this.unityCatalogProvisioningState = unityCatalogProvisioningState;
+ return this;
+ }
+
+ public ProvisioningInfoState getUnityCatalogProvisioningState() {
+ return unityCatalogProvisioningState;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SyncedDatabaseTable that = (SyncedDatabaseTable) o;
+ return Objects.equals(dataSynchronizationStatus, that.dataSynchronizationStatus)
+ && Objects.equals(databaseInstanceName, that.databaseInstanceName)
+ && Objects.equals(logicalDatabaseName, that.logicalDatabaseName)
+ && Objects.equals(name, that.name)
+ && Objects.equals(spec, that.spec)
+ && Objects.equals(tableServingUrl, that.tableServingUrl)
+ && Objects.equals(unityCatalogProvisioningState, that.unityCatalogProvisioningState);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ dataSynchronizationStatus,
+ databaseInstanceName,
+ logicalDatabaseName,
+ name,
+ spec,
+ tableServingUrl,
+ unityCatalogProvisioningState);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SyncedDatabaseTable.class)
+ .add("dataSynchronizationStatus", dataSynchronizationStatus)
+ .add("databaseInstanceName", databaseInstanceName)
+ .add("logicalDatabaseName", logicalDatabaseName)
+ .add("name", name)
+ .add("spec", spec)
+ .add("tableServingUrl", tableServingUrl)
+ .add("unityCatalogProvisioningState", unityCatalogProvisioningState)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java
new file mode 100755
index 000000000..0f0fd271d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum SyncedTableSchedulingPolicy {
+ CONTINUOUS,
+ SNAPSHOT,
+ TRIGGERED,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java
new file mode 100755
index 000000000..0f7ae97ef
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java
@@ -0,0 +1,146 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Specification of a synced database table. */
+@Generated
+public class SyncedTableSpec {
+ /**
+ * If true, the synced table's logical database and schema resources in PG will be created if they
+ * do not already exist.
+ */
+ @JsonProperty("create_database_objects_if_missing")
+ private Boolean createDatabaseObjectsIfMissing;
+
+ /** Spec of new pipeline. Should be empty if pipeline_id is set */
+ @JsonProperty("new_pipeline_spec")
+ private NewPipelineSpec newPipelineSpec;
+
+ /** ID of the associated pipeline. Should be empty if new_pipeline_spec is set */
+ @JsonProperty("pipeline_id")
+ private String pipelineId;
+
+ /** Primary Key columns to be used for data insert/update in the destination. */
+ @JsonProperty("primary_key_columns")
+ private Collection Securable types that support binding: - catalog - storage_credential - external_location
+ * Securable types that support binding: - catalog - storage_credential - credential -
+ * external_location
*/
@Generated
public class WorkspaceBindingsAPI {
@@ -41,7 +42,7 @@ public WorkspaceBindingsAPI(WorkspaceBindingsService mock) {
impl = mock;
}
- public CurrentWorkspaceBindings get(String name) {
+ public GetCatalogWorkspaceBindingsResponse get(String name) {
return get(new GetWorkspaceBindingRequest().setName(name));
}
@@ -51,12 +52,11 @@ public CurrentWorkspaceBindings get(String name) {
* Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of
* the catalog.
*/
- public CurrentWorkspaceBindings get(GetWorkspaceBindingRequest request) {
+ public GetCatalogWorkspaceBindingsResponse get(GetWorkspaceBindingRequest request) {
return impl.get(request);
}
- public Iterable Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner
* of the catalog.
*/
- public CurrentWorkspaceBindings update(UpdateWorkspaceBindings request) {
+ public UpdateCatalogWorkspaceBindingsResponse update(UpdateWorkspaceBindings request) {
return impl.update(request);
}
- public WorkspaceBindingsResponse updateBindings(
- UpdateBindingsSecurableType securableType, String securableName) {
+ public UpdateWorkspaceBindingsResponse updateBindings(
+ String securableType, String securableName) {
return updateBindings(
new UpdateWorkspaceBindingsParameters()
.setSecurableType(securableType)
@@ -109,7 +109,7 @@ public WorkspaceBindingsResponse updateBindings(
* Updates workspace bindings of the securable. The caller must be a metastore admin or an
* owner of the securable.
*/
- public WorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) {
+ public UpdateWorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) {
return impl.updateBindings(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java
index 7516c5686..ab6b07cbd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java
@@ -17,21 +17,21 @@ public WorkspaceBindingsImpl(ApiClient apiClient) {
}
@Override
- public CurrentWorkspaceBindings get(GetWorkspaceBindingRequest request) {
+ public GetCatalogWorkspaceBindingsResponse get(GetWorkspaceBindingRequest request) {
String path =
String.format("/api/2.1/unity-catalog/workspace-bindings/catalogs/%s", request.getName());
try {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- return apiClient.execute(req, CurrentWorkspaceBindings.class);
+ return apiClient.execute(req, GetCatalogWorkspaceBindingsResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public WorkspaceBindingsResponse getBindings(GetBindingsRequest request) {
+ public GetWorkspaceBindingsResponse getBindings(GetBindingsRequest request) {
String path =
String.format(
"/api/2.1/unity-catalog/bindings/%s/%s",
@@ -40,14 +40,14 @@ public WorkspaceBindingsResponse getBindings(GetBindingsRequest request) {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- return apiClient.execute(req, WorkspaceBindingsResponse.class);
+ return apiClient.execute(req, GetWorkspaceBindingsResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public CurrentWorkspaceBindings update(UpdateWorkspaceBindings request) {
+ public UpdateCatalogWorkspaceBindingsResponse update(UpdateWorkspaceBindings request) {
String path =
String.format("/api/2.1/unity-catalog/workspace-bindings/catalogs/%s", request.getName());
try {
@@ -55,14 +55,14 @@ public CurrentWorkspaceBindings update(UpdateWorkspaceBindings request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- return apiClient.execute(req, CurrentWorkspaceBindings.class);
+ return apiClient.execute(req, UpdateCatalogWorkspaceBindingsResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public WorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) {
+ public UpdateWorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) {
String path =
String.format(
"/api/2.1/unity-catalog/bindings/%s/%s",
@@ -72,7 +72,7 @@ public WorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameter
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- return apiClient.execute(req, WorkspaceBindingsResponse.class);
+ return apiClient.execute(req, UpdateWorkspaceBindingsResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java
index 9ab53e070..962b4ea69 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java
@@ -19,7 +19,8 @@
* Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which
* introduces the ability to bind a securable in READ_ONLY mode (catalogs only).
*
- * Securable types that support binding: - catalog - storage_credential - external_location
+ * Securable types that support binding: - catalog - storage_credential - credential -
+ * external_location
*
* This is the high-level interface, that contains generated methods.
*
@@ -33,7 +34,7 @@ public interface WorkspaceBindingsService {
* Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of
* the catalog.
*/
- CurrentWorkspaceBindings get(GetWorkspaceBindingRequest getWorkspaceBindingRequest);
+ GetCatalogWorkspaceBindingsResponse get(GetWorkspaceBindingRequest getWorkspaceBindingRequest);
/**
* Get securable workspace bindings.
@@ -41,7 +42,7 @@ public interface WorkspaceBindingsService {
* Gets workspace bindings of the securable. The caller must be a metastore admin or an owner
* of the securable.
*/
- WorkspaceBindingsResponse getBindings(GetBindingsRequest getBindingsRequest);
+ GetWorkspaceBindingsResponse getBindings(GetBindingsRequest getBindingsRequest);
/**
* Update catalog workspace bindings.
@@ -49,7 +50,7 @@ public interface WorkspaceBindingsService {
* Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner
* of the catalog.
*/
- CurrentWorkspaceBindings update(UpdateWorkspaceBindings updateWorkspaceBindings);
+ UpdateCatalogWorkspaceBindingsResponse update(UpdateWorkspaceBindings updateWorkspaceBindings);
/**
* Update securable workspace bindings.
@@ -57,6 +58,6 @@ public interface WorkspaceBindingsService {
* Updates workspace bindings of the securable. The caller must be a metastore admin or an
* owner of the securable.
*/
- WorkspaceBindingsResponse updateBindings(
+ UpdateWorkspaceBindingsResponse updateBindings(
UpdateWorkspaceBindingsParameters updateWorkspaceBindingsParameters);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java
index d6daf72a8..5f1c2d218 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java
@@ -52,13 +52,6 @@ public class EditInstancePool {
@JsonProperty("min_idle_instances")
private Long minIdleInstances;
- /**
- * For Fleet-pool V2, this object contains the information about the alternate node type ids to
- * use when attempting to launch a cluster if the node type id is not available.
- */
- @JsonProperty("node_type_flexibility")
- private NodeTypeFlexibility nodeTypeFlexibility;
-
/**
* This field encodes, through a single value, the resources available to each of the Spark nodes
* in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -123,15 +116,6 @@ public Long getMinIdleInstances() {
return minIdleInstances;
}
- public EditInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) {
- this.nodeTypeFlexibility = nodeTypeFlexibility;
- return this;
- }
-
- public NodeTypeFlexibility getNodeTypeFlexibility() {
- return nodeTypeFlexibility;
- }
-
public EditInstancePool setNodeTypeId(String nodeTypeId) {
this.nodeTypeId = nodeTypeId;
return this;
@@ -153,7 +137,6 @@ public boolean equals(Object o) {
&& Objects.equals(instancePoolName, that.instancePoolName)
&& Objects.equals(maxCapacity, that.maxCapacity)
&& Objects.equals(minIdleInstances, that.minIdleInstances)
- && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility)
&& Objects.equals(nodeTypeId, that.nodeTypeId);
}
@@ -166,7 +149,6 @@ public int hashCode() {
instancePoolName,
maxCapacity,
minIdleInstances,
- nodeTypeFlexibility,
nodeTypeId);
}
@@ -179,7 +161,6 @@ public String toString() {
.add("instancePoolName", instancePoolName)
.add("maxCapacity", maxCapacity)
.add("minIdleInstances", minIdleInstances)
- .add("nodeTypeFlexibility", nodeTypeFlexibility)
.add("nodeTypeId", nodeTypeId)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
index 25779f5c7..fb3c4aa48 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
@@ -36,6 +36,16 @@ public class Environment {
@JsonProperty("dependencies")
private Collection * **[Apps permissions](:service:apps)** — Manage which users can manage or use apps.
- *
- * * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or
- * attach to clusters.
- *
- * * **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users can use
- * cluster policies.
- *
- * * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can
- * view, manage, run, cancel, or own a Delta Live Tables pipeline.
- *
- * * **[Job permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel,
- * or own a job.
- *
- * * **[MLflow experiment permissions](:service:experiments)** — Manage which users can read,
- * edit, or manage MLflow experiments.
- *
- * * **[MLflow registered model permissions](:service:modelregistry)** — Manage which users can
- * read, edit, or manage MLflow registered models.
- *
- * * **[Password permissions](:service:users)** — Manage which users can use password login when
- * SSO is enabled.
- *
- * * **[Instance Pool permissions](:service:instancepools)** — Manage which users can manage or
- * attach to pools.
- *
- * * **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo.
- *
- * * **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view,
- * query, or manage a serving endpoint.
- *
- * * **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage
- * SQL warehouses.
- *
- * * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use
- * tokens.
- *
- * * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run,
- * edit, or manage alerts, dbsql-dashboards, directories, files, notebooks and queries.
- *
- * For the mapping of the required permissions for specific actions or abilities and other
- * important information, see [Access Control].
- *
- * Note that to manage access control on service principals, use **[Account Access Control
- * Proxy](:service:accountaccesscontrolproxy)**.
+ * on different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage which users
+ * can manage or use apps. * **[Cluster permissions](:service:clusters)** — Manage which users can
+ * manage, restart, or attach to clusters. * **[Cluster policy
+ * permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. *
+ * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view,
+ * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job permissions](:service:jobs)**
+ * — Manage which users can view, manage, trigger, cancel, or own a job. * **[MLflow experiment
+ * permissions](:service:experiments)** — Manage which users can read, edit, or manage MLflow
+ * experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which
+ * users can read, edit, or manage MLflow registered models. * **[Instance Pool
+ * permissions](:service:instancepools)** — Manage which users can manage or attach to pools. *
+ * **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo. *
+ * **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view,
+ * query, or manage a serving endpoint. * **[SQL warehouse permissions](:service:warehouses)** —
+ * Manage which users can use or manage SQL warehouses. * **[Token
+ * permissions](:service:tokenmanagement)** — Manage which users can create or use tokens. *
+ * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit,
+ * or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. For the mapping of
+ * the required permissions for specific actions or abilities and other important information, see
+ * [Access Control]. Note that to manage access control on service principals, use **[Account Access
+ * Control Proxy](:service:accountaccesscontrolproxy)**.
*
* [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html
*/
@@ -110,7 +84,7 @@ public GetPermissionLevelsResponse getPermissionLevels(GetPermissionLevelsReques
public ObjectPermissions set(String requestObjectType, String requestObjectId) {
return set(
- new PermissionsRequest()
+ new SetObjectPermissions()
.setRequestObjectType(requestObjectType)
.setRequestObjectId(requestObjectId));
}
@@ -122,13 +96,13 @@ public ObjectPermissions set(String requestObjectType, String requestObjectId) {
* direct permissions if none are specified. Objects can inherit permissions from their parent
* objects or root object.
*/
- public ObjectPermissions set(PermissionsRequest request) {
+ public ObjectPermissions set(SetObjectPermissions request) {
return impl.set(request);
}
public ObjectPermissions update(String requestObjectType, String requestObjectId) {
return update(
- new PermissionsRequest()
+ new UpdateObjectPermissions()
.setRequestObjectType(requestObjectType)
.setRequestObjectId(requestObjectId));
}
@@ -139,7 +113,7 @@ public ObjectPermissions update(String requestObjectType, String requestObjectId
* Updates the permissions on an object. Objects can inherit permissions from their parent
* objects or root object.
*/
- public ObjectPermissions update(PermissionsRequest request) {
+ public ObjectPermissions update(UpdateObjectPermissions request) {
return impl.update(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java
index af9d0c100..379ed77ad 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java
@@ -49,7 +49,7 @@ public GetPermissionLevelsResponse getPermissionLevels(GetPermissionLevelsReques
}
@Override
- public ObjectPermissions set(PermissionsRequest request) {
+ public ObjectPermissions set(SetObjectPermissions request) {
String path =
String.format(
"/api/2.0/permissions/%s/%s",
@@ -66,7 +66,7 @@ public ObjectPermissions set(PermissionsRequest request) {
}
@Override
- public ObjectPermissions update(PermissionsRequest request) {
+ public ObjectPermissions update(UpdateObjectPermissions request) {
String path =
String.format(
"/api/2.0/permissions/%s/%s",
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
index 03ccea94e..592dcea96 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
@@ -5,53 +5,27 @@
/**
* Permissions API are used to create read, write, edit, update and manage access for various users
- * on different objects and endpoints.
- *
- * * **[Apps permissions](:service:apps)** — Manage which users can manage or use apps.
- *
- * * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or
- * attach to clusters.
- *
- * * **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users can use
- * cluster policies.
- *
- * * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can
- * view, manage, run, cancel, or own a Delta Live Tables pipeline.
- *
- * * **[Job permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel,
- * or own a job.
- *
- * * **[MLflow experiment permissions](:service:experiments)** — Manage which users can read,
- * edit, or manage MLflow experiments.
- *
- * * **[MLflow registered model permissions](:service:modelregistry)** — Manage which users can
- * read, edit, or manage MLflow registered models.
- *
- * * **[Password permissions](:service:users)** — Manage which users can use password login when
- * SSO is enabled.
- *
- * * **[Instance Pool permissions](:service:instancepools)** — Manage which users can manage or
- * attach to pools.
- *
- * * **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo.
- *
- * * **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view,
- * query, or manage a serving endpoint.
- *
- * * **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage
- * SQL warehouses.
- *
- * * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use
- * tokens.
- *
- * * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run,
- * edit, or manage alerts, dbsql-dashboards, directories, files, notebooks and queries.
- *
- * For the mapping of the required permissions for specific actions or abilities and other
- * important information, see [Access Control].
- *
- * Note that to manage access control on service principals, use **[Account Access Control
- * Proxy](:service:accountaccesscontrolproxy)**.
+ * on different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage which users
+ * can manage or use apps. * **[Cluster permissions](:service:clusters)** — Manage which users can
+ * manage, restart, or attach to clusters. * **[Cluster policy
+ * permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. *
+ * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view,
+ * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job permissions](:service:jobs)**
+ * — Manage which users can view, manage, trigger, cancel, or own a job. * **[MLflow experiment
+ * permissions](:service:experiments)** — Manage which users can read, edit, or manage MLflow
+ * experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which
+ * users can read, edit, or manage MLflow registered models. * **[Instance Pool
+ * permissions](:service:instancepools)** — Manage which users can manage or attach to pools. *
+ * **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo. *
+ * **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view,
+ * query, or manage a serving endpoint. * **[SQL warehouse permissions](:service:warehouses)** —
+ * Manage which users can use or manage SQL warehouses. * **[Token
+ * permissions](:service:tokenmanagement)** — Manage which users can create or use tokens. *
+ * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit,
+ * or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. For the mapping of
+ * the required permissions for specific actions or abilities and other important information, see
+ * [Access Control]. Note that to manage access control on service principals, use **[Account Access
+ * Control Proxy](:service:accountaccesscontrolproxy)**.
*
* [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html
*
@@ -84,7 +58,7 @@ GetPermissionLevelsResponse getPermissionLevels(
* direct permissions if none are specified. Objects can inherit permissions from their parent
* objects or root object.
*/
- ObjectPermissions set(PermissionsRequest permissionsRequest);
+ ObjectPermissions set(SetObjectPermissions setObjectPermissions);
/**
* Update object permissions.
@@ -92,5 +66,5 @@ GetPermissionLevelsResponse getPermissionLevels(
* Updates the permissions on an object. Objects can inherit permissions from their parent
* objects or root object.
*/
- ObjectPermissions update(PermissionsRequest permissionsRequest);
+ ObjectPermissions update(UpdateObjectPermissions updateObjectPermissions);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
similarity index 85%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
index 1faafbab4..0f3b404da 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
@@ -10,7 +10,7 @@
import java.util.Objects;
@Generated
-public class PermissionsRequest {
+public class SetObjectPermissions {
/** */
@JsonProperty("access_control_list")
private Collection Updates any combination of the pt endpoint's served entities, the compute configuration of
+ * those served entities, and the endpoint's traffic config. Updates are instantaneous and
+ * endpoint should be updated instantly
+ */
+ public Wait Updates any combination of the pt endpoint's served entities, the compute configuration of
+ * those served entities, and the endpoint's traffic config. Updates are instantaneous and
+ * endpoint should be updated instantly
+ */
+ ServingEndpointDetailed updateProvisionedThroughputEndpointConfig(
+ UpdateProvisionedThroughputEndpointConfigRequest
+ updateProvisionedThroughputEndpointConfigRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java
new file mode 100755
index 000000000..997aa5fbb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.serving;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateProvisionedThroughputEndpointConfigRequest {
+ /** */
+ @JsonProperty("config")
+ private PtEndpointCoreConfig config;
+
+ /** The name of the pt endpoint to update. This field is required. */
+ @JsonIgnore private String name;
+
+ public UpdateProvisionedThroughputEndpointConfigRequest setConfig(PtEndpointCoreConfig config) {
+ this.config = config;
+ return this;
+ }
+
+ public PtEndpointCoreConfig getConfig() {
+ return config;
+ }
+
+ public UpdateProvisionedThroughputEndpointConfigRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateProvisionedThroughputEndpointConfigRequest that =
+ (UpdateProvisionedThroughputEndpointConfigRequest) o;
+ return Objects.equals(config, that.config) && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(config, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateProvisionedThroughputEndpointConfigRequest.class)
+ .add("config", config)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
index 921d60bee..b343a30ed 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
@@ -21,6 +21,10 @@ public class AccountSettingsAPI {
private EsmEnablementAccountAPI esmEnablementAccountAPI;
+ private LlmProxyPartnerPoweredAccountAPI llmProxyPartnerPoweredAccountAPI;
+
+ private LlmProxyPartnerPoweredEnforceAPI llmProxyPartnerPoweredEnforceAPI;
+
private PersonalComputeAPI personalComputeAPI;
/** Regular-use constructor */
@@ -35,6 +39,10 @@ public AccountSettingsAPI(ApiClient apiClient) {
esmEnablementAccountAPI = new EsmEnablementAccountAPI(apiClient);
+ llmProxyPartnerPoweredAccountAPI = new LlmProxyPartnerPoweredAccountAPI(apiClient);
+
+ llmProxyPartnerPoweredEnforceAPI = new LlmProxyPartnerPoweredEnforceAPI(apiClient);
+
personalComputeAPI = new PersonalComputeAPI(apiClient);
}
@@ -69,6 +77,19 @@ public EsmEnablementAccountAPI EsmEnablementAccount() {
return esmEnablementAccountAPI;
}
+ /** Determines if partner powered models are enabled or not for a specific account. */
+ public LlmProxyPartnerPoweredAccountAPI LlmProxyPartnerPoweredAccount() {
+ return llmProxyPartnerPoweredAccountAPI;
+ }
+
+ /**
+ * Determines if the account-level partner-powered setting value is enforced upon the
+ * workspace-level partner-powered setting.
+ */
+ public LlmProxyPartnerPoweredEnforceAPI LlmProxyPartnerPoweredEnforce() {
+ return llmProxyPartnerPoweredEnforceAPI;
+ }
+
/**
* The Personal Compute enablement setting lets you control which users can use the Personal
* Compute default policy to create compute resources.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java
new file mode 100755
index 000000000..b746ad732
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java
@@ -0,0 +1,55 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete the enable partner powered AI features workspace setting */
+@Generated
+public class DeleteLlmProxyPartnerPoweredWorkspaceRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public DeleteLlmProxyPartnerPoweredWorkspaceRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteLlmProxyPartnerPoweredWorkspaceRequest that =
+ (DeleteLlmProxyPartnerPoweredWorkspaceRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteLlmProxyPartnerPoweredWorkspaceRequest.class)
+ .add("etag", etag)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java
new file mode 100755
index 000000000..10524ea66
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java
@@ -0,0 +1,53 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** The etag is returned. */
+@Generated
+public class DeleteLlmProxyPartnerPoweredWorkspaceResponse {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ public DeleteLlmProxyPartnerPoweredWorkspaceResponse setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteLlmProxyPartnerPoweredWorkspaceResponse that =
+ (DeleteLlmProxyPartnerPoweredWorkspaceResponse) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteLlmProxyPartnerPoweredWorkspaceResponse.class)
+ .add("etag", etag)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java
new file mode 100755
index 000000000..d5fd149f4
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java
@@ -0,0 +1,54 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get the enable partner powered AI features account setting */
+@Generated
+public class GetLlmProxyPartnerPoweredAccountRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public GetLlmProxyPartnerPoweredAccountRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetLlmProxyPartnerPoweredAccountRequest that = (GetLlmProxyPartnerPoweredAccountRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetLlmProxyPartnerPoweredAccountRequest.class)
+ .add("etag", etag)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java
new file mode 100755
index 000000000..63e690981
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java
@@ -0,0 +1,54 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get the enforcement status of partner powered AI features account setting */
+@Generated
+public class GetLlmProxyPartnerPoweredEnforceRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public GetLlmProxyPartnerPoweredEnforceRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetLlmProxyPartnerPoweredEnforceRequest that = (GetLlmProxyPartnerPoweredEnforceRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetLlmProxyPartnerPoweredEnforceRequest.class)
+ .add("etag", etag)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java
new file mode 100755
index 000000000..b149178f4
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java
@@ -0,0 +1,54 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get the enable partner powered AI features workspace setting */
+@Generated
+public class GetLlmProxyPartnerPoweredWorkspaceRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public GetLlmProxyPartnerPoweredWorkspaceRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetLlmProxyPartnerPoweredWorkspaceRequest that = (GetLlmProxyPartnerPoweredWorkspaceRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetLlmProxyPartnerPoweredWorkspaceRequest.class)
+ .add("etag", etag)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java
new file mode 100755
index 000000000..adb284ade
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class LlmProxyPartnerPoweredAccount {
+ /** */
+ @JsonProperty("boolean_val")
+ private BooleanMessage booleanVal;
+
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+ * etag from a GET request, and pass it with the PATCH request to identify the setting version you
+ * are updating.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ /**
+ * Name of the corresponding setting. This field is populated in the response, but it will not be
+ * respected even if it's set in the request body. The setting name in the path parameter will be
+ * respected instead. Setting name is required to be 'default' if the setting only has one
+ * instance per workspace.
+ */
+ @JsonProperty("setting_name")
+ private String settingName;
+
+ public LlmProxyPartnerPoweredAccount setBooleanVal(BooleanMessage booleanVal) {
+ this.booleanVal = booleanVal;
+ return this;
+ }
+
+ public BooleanMessage getBooleanVal() {
+ return booleanVal;
+ }
+
+ public LlmProxyPartnerPoweredAccount setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public LlmProxyPartnerPoweredAccount setSettingName(String settingName) {
+ this.settingName = settingName;
+ return this;
+ }
+
+ public String getSettingName() {
+ return settingName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ LlmProxyPartnerPoweredAccount that = (LlmProxyPartnerPoweredAccount) o;
+ return Objects.equals(booleanVal, that.booleanVal)
+ && Objects.equals(etag, that.etag)
+ && Objects.equals(settingName, that.settingName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(booleanVal, etag, settingName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(LlmProxyPartnerPoweredAccount.class)
+ .add("booleanVal", booleanVal)
+ .add("etag", etag)
+ .add("settingName", settingName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java
new file mode 100755
index 000000000..c5b67f821
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java
@@ -0,0 +1,56 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Determines if partner powered models are enabled or not for a specific account */
+@Generated
+public class LlmProxyPartnerPoweredAccountAPI {
+  private static final Logger LOG = LoggerFactory.getLogger(LlmProxyPartnerPoweredAccountAPI.class);
+
+  private final LlmProxyPartnerPoweredAccountService impl;
+
+  /** Regular-use constructor */
+  public LlmProxyPartnerPoweredAccountAPI(ApiClient apiClient) {
+    impl = new LlmProxyPartnerPoweredAccountImpl(apiClient);
+  }
+
+  /** Constructor for mocks */
+  public LlmProxyPartnerPoweredAccountAPI(LlmProxyPartnerPoweredAccountService mock) {
+    impl = mock;
+  }
+
+  /**
+   * Get the enable partner powered AI features account setting.
+   *
+   * Gets the enable partner powered AI features account setting.
+   */
+  public LlmProxyPartnerPoweredAccount get(GetLlmProxyPartnerPoweredAccountRequest request) {
+    return impl.get(request);
+  }
+
+  /**
+   * Convenience overload that assembles an {@link UpdateLlmProxyPartnerPoweredAccountRequest}
+   * from its individual fields and delegates to
+   * {@link #update(UpdateLlmProxyPartnerPoweredAccountRequest)}.
+   */
+  public LlmProxyPartnerPoweredAccount update(
+      boolean allowMissing, LlmProxyPartnerPoweredAccount setting, String fieldMask) {
+    return update(
+        new UpdateLlmProxyPartnerPoweredAccountRequest()
+            .setAllowMissing(allowMissing)
+            .setSetting(setting)
+            .setFieldMask(fieldMask));
+  }
+
+  /**
+   * Update the enable partner powered AI features account setting.
+   *
+   * Updates the enable partner powered AI features account setting.
+   */
+  public LlmProxyPartnerPoweredAccount update(UpdateLlmProxyPartnerPoweredAccountRequest request) {
+    return impl.update(request);
+  }
+
+  /** Returns the underlying service implementation (useful for testing and advanced use). */
+  public LlmProxyPartnerPoweredAccountService impl() {
+    return impl;
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java
new file mode 100755
index 000000000..ab253c810
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java
@@ -0,0 +1,51 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of LlmProxyPartnerPoweredAccount */
+@Generated
+class LlmProxyPartnerPoweredAccountImpl implements LlmProxyPartnerPoweredAccountService {
+  private final ApiClient apiClient;
+
+  public LlmProxyPartnerPoweredAccountImpl(ApiClient apiClient) {
+    this.apiClient = apiClient;
+  }
+
+  /**
+   * Issues GET /api/2.0/accounts/{account_id}/settings/types/llm_proxy_partner_powered/names/default.
+   * IOExceptions from the HTTP layer are wrapped in {@link DatabricksException}.
+   */
+  @Override
+  public LlmProxyPartnerPoweredAccount get(GetLlmProxyPartnerPoweredAccountRequest request) {
+    String path =
+        String.format(
+            "/api/2.0/accounts/%s/settings/types/llm_proxy_partner_powered/names/default",
+            apiClient.configuredAccountID());
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, LlmProxyPartnerPoweredAccount.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
+  }
+
+  /**
+   * Issues PATCH /api/2.0/accounts/{account_id}/settings/types/llm_proxy_partner_powered/names/default
+   * with the serialized request as the JSON body. IOExceptions are wrapped in
+   * {@link DatabricksException}.
+   */
+  @Override
+  public LlmProxyPartnerPoweredAccount update(UpdateLlmProxyPartnerPoweredAccountRequest request) {
+    String path =
+        String.format(
+            "/api/2.0/accounts/%s/settings/types/llm_proxy_partner_powered/names/default",
+            apiClient.configuredAccountID());
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, LlmProxyPartnerPoweredAccount.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountService.java
new file mode 100755
index 000000000..075f5e07f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountService.java
@@ -0,0 +1,30 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Determines if partner powered models are enabled or not for a specific account
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface LlmProxyPartnerPoweredAccountService {
+  /**
+   * Get the enable partner powered AI features account setting.
+   *
+   * Gets the enable partner powered AI features account setting.
+   *
+   * @param getLlmProxyPartnerPoweredAccountRequest the read request parameters
+   */
+  LlmProxyPartnerPoweredAccount get(
+      GetLlmProxyPartnerPoweredAccountRequest getLlmProxyPartnerPoweredAccountRequest);
+
+  /**
+   * Update the enable partner powered AI features account setting.
+   *
+   * Updates the enable partner powered AI features account setting.
+   *
+   * @param updateLlmProxyPartnerPoweredAccountRequest the setting payload and update options
+   */
+  LlmProxyPartnerPoweredAccount update(
+      UpdateLlmProxyPartnerPoweredAccountRequest updateLlmProxyPartnerPoweredAccountRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java
new file mode 100755
index 000000000..653a3ddd6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class LlmProxyPartnerPoweredEnforce {
+  /** The setting value, wrapped in the generic {@code BooleanMessage} container. */
+  @JsonProperty("boolean_val")
+  private BooleanMessage booleanVal;
+
+  /**
+   * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+   * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+   * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+   * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+   * etag from a GET request, and pass it with the PATCH request to identify the setting version you
+   * are updating.
+   */
+  @JsonProperty("etag")
+  private String etag;
+
+  /**
+   * Name of the corresponding setting. This field is populated in the response, but it will not be
+   * respected even if it's set in the request body. The setting name in the path parameter will be
+   * respected instead. Setting name is required to be 'default' if the setting only has one
+   * instance per workspace.
+   */
+  @JsonProperty("setting_name")
+  private String settingName;
+
+  // Fluent setters: each returns this instance so calls can be chained.
+
+  public LlmProxyPartnerPoweredEnforce setBooleanVal(BooleanMessage booleanVal) {
+    this.booleanVal = booleanVal;
+    return this;
+  }
+
+  public BooleanMessage getBooleanVal() {
+    return booleanVal;
+  }
+
+  public LlmProxyPartnerPoweredEnforce setEtag(String etag) {
+    this.etag = etag;
+    return this;
+  }
+
+  public String getEtag() {
+    return etag;
+  }
+
+  public LlmProxyPartnerPoweredEnforce setSettingName(String settingName) {
+    this.settingName = settingName;
+    return this;
+  }
+
+  public String getSettingName() {
+    return settingName;
+  }
+
+  /** Value-based equality over all three fields; requires the exact same runtime class. */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    LlmProxyPartnerPoweredEnforce that = (LlmProxyPartnerPoweredEnforce) o;
+    return Objects.equals(booleanVal, that.booleanVal)
+        && Objects.equals(etag, that.etag)
+        && Objects.equals(settingName, that.settingName);
+  }
+
+  /** Hash code consistent with {@link #equals(Object)}. */
+  @Override
+  public int hashCode() {
+    return Objects.hash(booleanVal, etag, settingName);
+  }
+
+  /** Renders all fields for debugging via {@link ToStringer}. */
+  @Override
+  public String toString() {
+    return new ToStringer(LlmProxyPartnerPoweredEnforce.class)
+        .add("booleanVal", booleanVal)
+        .add("etag", etag)
+        .add("settingName", settingName)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java
new file mode 100755
index 000000000..a706606fc
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Determines if the account-level partner-powered setting value is enforced upon the
+ * workspace-level partner-powered setting
+ */
+@Generated
+public class LlmProxyPartnerPoweredEnforceAPI {
+  private static final Logger LOG = LoggerFactory.getLogger(LlmProxyPartnerPoweredEnforceAPI.class);
+
+  private final LlmProxyPartnerPoweredEnforceService impl;
+
+  /** Regular-use constructor */
+  public LlmProxyPartnerPoweredEnforceAPI(ApiClient apiClient) {
+    impl = new LlmProxyPartnerPoweredEnforceImpl(apiClient);
+  }
+
+  /** Constructor for mocks */
+  public LlmProxyPartnerPoweredEnforceAPI(LlmProxyPartnerPoweredEnforceService mock) {
+    impl = mock;
+  }
+
+  /**
+   * Get the enforcement status of partner powered AI features account setting.
+   *
+   * Gets the enforcement status of partner powered AI features account setting.
+   */
+  public LlmProxyPartnerPoweredEnforce get(GetLlmProxyPartnerPoweredEnforceRequest request) {
+    return impl.get(request);
+  }
+
+  /**
+   * Convenience overload that assembles an {@link UpdateLlmProxyPartnerPoweredEnforceRequest}
+   * from its individual fields and delegates to
+   * {@link #update(UpdateLlmProxyPartnerPoweredEnforceRequest)}.
+   */
+  public LlmProxyPartnerPoweredEnforce update(
+      boolean allowMissing, LlmProxyPartnerPoweredEnforce setting, String fieldMask) {
+    return update(
+        new UpdateLlmProxyPartnerPoweredEnforceRequest()
+            .setAllowMissing(allowMissing)
+            .setSetting(setting)
+            .setFieldMask(fieldMask));
+  }
+
+  /**
+   * Update the enforcement status of partner powered AI features account setting.
+   *
+   * Updates the enforcement status of partner powered AI features account setting.
+   */
+  public LlmProxyPartnerPoweredEnforce update(UpdateLlmProxyPartnerPoweredEnforceRequest request) {
+    return impl.update(request);
+  }
+
+  /** Returns the underlying service implementation (useful for testing and advanced use). */
+  public LlmProxyPartnerPoweredEnforceService impl() {
+    return impl;
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java
new file mode 100755
index 000000000..8ffdaee6c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java
@@ -0,0 +1,51 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of LlmProxyPartnerPoweredEnforce */
+@Generated
+class LlmProxyPartnerPoweredEnforceImpl implements LlmProxyPartnerPoweredEnforceService {
+  private final ApiClient apiClient;
+
+  public LlmProxyPartnerPoweredEnforceImpl(ApiClient apiClient) {
+    this.apiClient = apiClient;
+  }
+
+  /**
+   * Issues GET
+   * /api/2.0/accounts/{account_id}/settings/types/llm_proxy_partner_powered_enforce/names/default.
+   * IOExceptions from the HTTP layer are wrapped in {@link DatabricksException}.
+   */
+  @Override
+  public LlmProxyPartnerPoweredEnforce get(GetLlmProxyPartnerPoweredEnforceRequest request) {
+    String path =
+        String.format(
+            "/api/2.0/accounts/%s/settings/types/llm_proxy_partner_powered_enforce/names/default",
+            apiClient.configuredAccountID());
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, LlmProxyPartnerPoweredEnforce.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
+  }
+
+  /**
+   * Issues PATCH
+   * /api/2.0/accounts/{account_id}/settings/types/llm_proxy_partner_powered_enforce/names/default
+   * with the serialized request as the JSON body. IOExceptions are wrapped in
+   * {@link DatabricksException}.
+   */
+  @Override
+  public LlmProxyPartnerPoweredEnforce update(UpdateLlmProxyPartnerPoweredEnforceRequest request) {
+    String path =
+        String.format(
+            "/api/2.0/accounts/%s/settings/types/llm_proxy_partner_powered_enforce/names/default",
+            apiClient.configuredAccountID());
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, LlmProxyPartnerPoweredEnforce.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceService.java
new file mode 100755
index 000000000..dcad9f4fc
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceService.java
@@ -0,0 +1,31 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Determines if the account-level partner-powered setting value is enforced upon the
+ * workspace-level partner-powered setting
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface LlmProxyPartnerPoweredEnforceService {
+  /**
+   * Get the enforcement status of partner powered AI features account setting.
+   *
+   * Gets the enforcement status of partner powered AI features account setting.
+   *
+   * @param getLlmProxyPartnerPoweredEnforceRequest the read request parameters
+   */
+  LlmProxyPartnerPoweredEnforce get(
+      GetLlmProxyPartnerPoweredEnforceRequest getLlmProxyPartnerPoweredEnforceRequest);
+
+  /**
+   * Update the enforcement status of partner powered AI features account setting.
+   *
+   * Updates the enforcement status of partner powered AI features account setting.
+   *
+   * @param updateLlmProxyPartnerPoweredEnforceRequest the setting payload and update options
+   */
+  LlmProxyPartnerPoweredEnforce update(
+      UpdateLlmProxyPartnerPoweredEnforceRequest updateLlmProxyPartnerPoweredEnforceRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java
new file mode 100755
index 000000000..85cd8a171
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class LlmProxyPartnerPoweredWorkspace {
+  /** The setting value, wrapped in the generic {@code BooleanMessage} container. */
+  @JsonProperty("boolean_val")
+  private BooleanMessage booleanVal;
+
+  /**
+   * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+   * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+   * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+   * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+   * etag from a GET request, and pass it with the PATCH request to identify the setting version you
+   * are updating.
+   */
+  @JsonProperty("etag")
+  private String etag;
+
+  /**
+   * Name of the corresponding setting. This field is populated in the response, but it will not be
+   * respected even if it's set in the request body. The setting name in the path parameter will be
+   * respected instead. Setting name is required to be 'default' if the setting only has one
+   * instance per workspace.
+   */
+  @JsonProperty("setting_name")
+  private String settingName;
+
+  // Fluent setters: each returns this instance so calls can be chained.
+
+  public LlmProxyPartnerPoweredWorkspace setBooleanVal(BooleanMessage booleanVal) {
+    this.booleanVal = booleanVal;
+    return this;
+  }
+
+  public BooleanMessage getBooleanVal() {
+    return booleanVal;
+  }
+
+  public LlmProxyPartnerPoweredWorkspace setEtag(String etag) {
+    this.etag = etag;
+    return this;
+  }
+
+  public String getEtag() {
+    return etag;
+  }
+
+  public LlmProxyPartnerPoweredWorkspace setSettingName(String settingName) {
+    this.settingName = settingName;
+    return this;
+  }
+
+  public String getSettingName() {
+    return settingName;
+  }
+
+  /** Value-based equality over all three fields; requires the exact same runtime class. */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    LlmProxyPartnerPoweredWorkspace that = (LlmProxyPartnerPoweredWorkspace) o;
+    return Objects.equals(booleanVal, that.booleanVal)
+        && Objects.equals(etag, that.etag)
+        && Objects.equals(settingName, that.settingName);
+  }
+
+  /** Hash code consistent with {@link #equals(Object)}. */
+  @Override
+  public int hashCode() {
+    return Objects.hash(booleanVal, etag, settingName);
+  }
+
+  /** Renders all fields for debugging via {@link ToStringer}. */
+  @Override
+  public String toString() {
+    return new ToStringer(LlmProxyPartnerPoweredWorkspace.class)
+        .add("booleanVal", booleanVal)
+        .add("etag", etag)
+        .add("settingName", settingName)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java
new file mode 100755
index 000000000..fcb5dfbc0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java
@@ -0,0 +1,68 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Determines if partner powered models are enabled or not for a specific workspace */
+@Generated
+public class LlmProxyPartnerPoweredWorkspaceAPI {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(LlmProxyPartnerPoweredWorkspaceAPI.class);
+
+  private final LlmProxyPartnerPoweredWorkspaceService impl;
+
+  /** Regular-use constructor */
+  public LlmProxyPartnerPoweredWorkspaceAPI(ApiClient apiClient) {
+    impl = new LlmProxyPartnerPoweredWorkspaceImpl(apiClient);
+  }
+
+  /** Constructor for mocks */
+  public LlmProxyPartnerPoweredWorkspaceAPI(LlmProxyPartnerPoweredWorkspaceService mock) {
+    impl = mock;
+  }
+
+  /**
+   * Delete the enable partner powered AI features workspace setting.
+   *
+   * Reverts the enable partner powered AI features workspace setting to its default value.
+   */
+  public DeleteLlmProxyPartnerPoweredWorkspaceResponse delete(
+      DeleteLlmProxyPartnerPoweredWorkspaceRequest request) {
+    return impl.delete(request);
+  }
+
+  /**
+   * Get the enable partner powered AI features workspace setting.
+   *
+   * Gets the enable partner powered AI features workspace setting.
+   */
+  public LlmProxyPartnerPoweredWorkspace get(GetLlmProxyPartnerPoweredWorkspaceRequest request) {
+    return impl.get(request);
+  }
+
+  /**
+   * Convenience overload that assembles an {@link UpdateLlmProxyPartnerPoweredWorkspaceRequest}
+   * from its individual fields and delegates to
+   * {@link #update(UpdateLlmProxyPartnerPoweredWorkspaceRequest)}.
+   */
+  public LlmProxyPartnerPoweredWorkspace update(
+      boolean allowMissing, LlmProxyPartnerPoweredWorkspace setting, String fieldMask) {
+    return update(
+        new UpdateLlmProxyPartnerPoweredWorkspaceRequest()
+            .setAllowMissing(allowMissing)
+            .setSetting(setting)
+            .setFieldMask(fieldMask));
+  }
+
+  /**
+   * Update the enable partner powered AI features workspace setting.
+   *
+   * Updates the enable partner powered AI features workspace setting.
+   */
+  public LlmProxyPartnerPoweredWorkspace update(
+      UpdateLlmProxyPartnerPoweredWorkspaceRequest request) {
+    return impl.update(request);
+  }
+
+  /** Returns the underlying service implementation (useful for testing and advanced use). */
+  public LlmProxyPartnerPoweredWorkspaceService impl() {
+    return impl;
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java
new file mode 100755
index 000000000..7ebe4b415
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of LlmProxyPartnerPoweredWorkspace */
+@Generated
+class LlmProxyPartnerPoweredWorkspaceImpl implements LlmProxyPartnerPoweredWorkspaceService {
+  private final ApiClient apiClient;
+
+  public LlmProxyPartnerPoweredWorkspaceImpl(ApiClient apiClient) {
+    this.apiClient = apiClient;
+  }
+
+  /**
+   * Issues DELETE /api/2.0/settings/types/llm_proxy_partner_powered/names/default. IOExceptions
+   * from the HTTP layer are wrapped in {@link DatabricksException}.
+   */
+  @Override
+  public DeleteLlmProxyPartnerPoweredWorkspaceResponse delete(
+      DeleteLlmProxyPartnerPoweredWorkspaceRequest request) {
+    String path = "/api/2.0/settings/types/llm_proxy_partner_powered/names/default";
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, DeleteLlmProxyPartnerPoweredWorkspaceResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
+  }
+
+  /**
+   * Issues GET /api/2.0/settings/types/llm_proxy_partner_powered/names/default. IOExceptions are
+   * wrapped in {@link DatabricksException}.
+   */
+  @Override
+  public LlmProxyPartnerPoweredWorkspace get(GetLlmProxyPartnerPoweredWorkspaceRequest request) {
+    String path = "/api/2.0/settings/types/llm_proxy_partner_powered/names/default";
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, LlmProxyPartnerPoweredWorkspace.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
+  }
+
+  /**
+   * Issues PATCH /api/2.0/settings/types/llm_proxy_partner_powered/names/default with the
+   * serialized request as the JSON body. IOExceptions are wrapped in {@link DatabricksException}.
+   */
+  @Override
+  public LlmProxyPartnerPoweredWorkspace update(
+      UpdateLlmProxyPartnerPoweredWorkspaceRequest request) {
+    String path = "/api/2.0/settings/types/llm_proxy_partner_powered/names/default";
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, LlmProxyPartnerPoweredWorkspace.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceService.java
new file mode 100755
index 000000000..ee6083576
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceService.java
@@ -0,0 +1,38 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Determines if partner powered models are enabled or not for a specific workspace
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface LlmProxyPartnerPoweredWorkspaceService {
+  /**
+   * Delete the enable partner powered AI features workspace setting.
+   *
+   * Reverts the enable partner powered AI features workspace setting to its default value.
+   *
+   * @param deleteLlmProxyPartnerPoweredWorkspaceRequest the delete request parameters
+   */
+  DeleteLlmProxyPartnerPoweredWorkspaceResponse delete(
+      DeleteLlmProxyPartnerPoweredWorkspaceRequest deleteLlmProxyPartnerPoweredWorkspaceRequest);
+
+  /**
+   * Get the enable partner powered AI features workspace setting.
+   *
+   * Gets the enable partner powered AI features workspace setting.
+   *
+   * @param getLlmProxyPartnerPoweredWorkspaceRequest the read request parameters
+   */
+  LlmProxyPartnerPoweredWorkspace get(
+      GetLlmProxyPartnerPoweredWorkspaceRequest getLlmProxyPartnerPoweredWorkspaceRequest);
+
+  /**
+   * Update the enable partner powered AI features workspace setting.
+   *
+   * Updates the enable partner powered AI features workspace setting.
+   *
+   * @param updateLlmProxyPartnerPoweredWorkspaceRequest the setting payload and update options
+   */
+  LlmProxyPartnerPoweredWorkspace update(
+      UpdateLlmProxyPartnerPoweredWorkspaceRequest updateLlmProxyPartnerPoweredWorkspaceRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
index 81648bdf3..b0fba4f64 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
@@ -35,6 +35,8 @@ public class SettingsAPI {
private EnhancedSecurityMonitoringAPI enhancedSecurityMonitoringAPI;
+ private LlmProxyPartnerPoweredWorkspaceAPI llmProxyPartnerPoweredWorkspaceAPI;
+
private RestrictWorkspaceAdminsAPI restrictWorkspaceAdminsAPI;
/** Regular-use constructor */
@@ -64,6 +66,8 @@ public SettingsAPI(ApiClient apiClient) {
enhancedSecurityMonitoringAPI = new EnhancedSecurityMonitoringAPI(apiClient);
+ llmProxyPartnerPoweredWorkspaceAPI = new LlmProxyPartnerPoweredWorkspaceAPI(apiClient);
+
restrictWorkspaceAdminsAPI = new RestrictWorkspaceAdminsAPI(apiClient);
}
@@ -136,6 +140,11 @@ public EnhancedSecurityMonitoringAPI EnhancedSecurityMonitoring() {
return enhancedSecurityMonitoringAPI;
}
+ /** Determines if partner powered models are enabled or not for a specific workspace. */
+ public LlmProxyPartnerPoweredWorkspaceAPI LlmProxyPartnerPoweredWorkspace() {
+ return llmProxyPartnerPoweredWorkspaceAPI;
+ }
+
/**
* The Restrict Workspace Admins setting lets you control the capabilities of workspace admins.
*/
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java
new file mode 100755
index 000000000..3d8db559a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java
@@ -0,0 +1,87 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Details required to update a setting. */
+@Generated
+public class UpdateLlmProxyPartnerPoweredAccountRequest {
+  /** This should always be set to true for Settings API. Added for AIP compliance. */
+  @JsonProperty("allow_missing")
+  private Boolean allowMissing;
+
+  /**
+   * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+   * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+   * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+   * allowed, as only the entire collection field can be specified. Field names must exactly match
+   * the resource field names.
+   *
+   * A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+   * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+   * the API changes in the future.
+   */
+  @JsonProperty("field_mask")
+  private String fieldMask;
+
+  /** The setting value to write. */
+  @JsonProperty("setting")
+  private LlmProxyPartnerPoweredAccount setting;
+
+  // Fluent setters: each returns this instance so calls can be chained.
+
+  public UpdateLlmProxyPartnerPoweredAccountRequest setAllowMissing(Boolean allowMissing) {
+    this.allowMissing = allowMissing;
+    return this;
+  }
+
+  public Boolean getAllowMissing() {
+    return allowMissing;
+  }
+
+  public UpdateLlmProxyPartnerPoweredAccountRequest setFieldMask(String fieldMask) {
+    this.fieldMask = fieldMask;
+    return this;
+  }
+
+  public String getFieldMask() {
+    return fieldMask;
+  }
+
+  public UpdateLlmProxyPartnerPoweredAccountRequest setSetting(
+      LlmProxyPartnerPoweredAccount setting) {
+    this.setting = setting;
+    return this;
+  }
+
+  public LlmProxyPartnerPoweredAccount getSetting() {
+    return setting;
+  }
+
+  /** Value-based equality over all three fields; requires the exact same runtime class. */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    UpdateLlmProxyPartnerPoweredAccountRequest that =
+        (UpdateLlmProxyPartnerPoweredAccountRequest) o;
+    return Objects.equals(allowMissing, that.allowMissing)
+        && Objects.equals(fieldMask, that.fieldMask)
+        && Objects.equals(setting, that.setting);
+  }
+
+  /** Hash code consistent with {@link #equals(Object)}. */
+  @Override
+  public int hashCode() {
+    return Objects.hash(allowMissing, fieldMask, setting);
+  }
+
+  /** Renders all fields for debugging via {@link ToStringer}. */
+  @Override
+  public String toString() {
+    return new ToStringer(UpdateLlmProxyPartnerPoweredAccountRequest.class)
+        .add("allowMissing", allowMissing)
+        .add("fieldMask", fieldMask)
+        .add("setting", setting)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java
new file mode 100755
index 000000000..c644fc32c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java
@@ -0,0 +1,87 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Details required to update a setting. */
+@Generated
+public class UpdateLlmProxyPartnerPoweredEnforceRequest {
+ /** This should always be set to true for Settings API. Added for AIP compliance. */
+ @JsonProperty("allow_missing")
+ private Boolean allowMissing;
+
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
+ */
+ @JsonProperty("field_mask")
+ private String fieldMask;
+
+ /** */
+ @JsonProperty("setting")
+ private LlmProxyPartnerPoweredEnforce setting;
+
+ public UpdateLlmProxyPartnerPoweredEnforceRequest setAllowMissing(Boolean allowMissing) {
+ this.allowMissing = allowMissing;
+ return this;
+ }
+
+ public Boolean getAllowMissing() {
+ return allowMissing;
+ }
+
+ public UpdateLlmProxyPartnerPoweredEnforceRequest setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public UpdateLlmProxyPartnerPoweredEnforceRequest setSetting(
+ LlmProxyPartnerPoweredEnforce setting) {
+ this.setting = setting;
+ return this;
+ }
+
+ public LlmProxyPartnerPoweredEnforce getSetting() {
+ return setting;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateLlmProxyPartnerPoweredEnforceRequest that =
+ (UpdateLlmProxyPartnerPoweredEnforceRequest) o;
+ return Objects.equals(allowMissing, that.allowMissing)
+ && Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(setting, that.setting);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allowMissing, fieldMask, setting);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateLlmProxyPartnerPoweredEnforceRequest.class)
+ .add("allowMissing", allowMissing)
+ .add("fieldMask", fieldMask)
+ .add("setting", setting)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java
new file mode 100755
index 000000000..52f1bb85a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java
@@ -0,0 +1,87 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Details required to update a setting. */
+@Generated
+public class UpdateLlmProxyPartnerPoweredWorkspaceRequest {
+ /** This should always be set to true for Settings API. Added for AIP compliance. */
+ @JsonProperty("allow_missing")
+ private Boolean allowMissing;
+
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
+ */
+ @JsonProperty("field_mask")
+ private String fieldMask;
+
+ /** */
+ @JsonProperty("setting")
+ private LlmProxyPartnerPoweredWorkspace setting;
+
+ public UpdateLlmProxyPartnerPoweredWorkspaceRequest setAllowMissing(Boolean allowMissing) {
+ this.allowMissing = allowMissing;
+ return this;
+ }
+
+ public Boolean getAllowMissing() {
+ return allowMissing;
+ }
+
+ public UpdateLlmProxyPartnerPoweredWorkspaceRequest setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public UpdateLlmProxyPartnerPoweredWorkspaceRequest setSetting(
+ LlmProxyPartnerPoweredWorkspace setting) {
+ this.setting = setting;
+ return this;
+ }
+
+ public LlmProxyPartnerPoweredWorkspace getSetting() {
+ return setting;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateLlmProxyPartnerPoweredWorkspaceRequest that =
+ (UpdateLlmProxyPartnerPoweredWorkspaceRequest) o;
+ return Objects.equals(allowMissing, that.allowMissing)
+ && Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(setting, that.setting);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allowMissing, fieldMask, setting);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateLlmProxyPartnerPoweredWorkspaceRequest.class)
+ .add("allowMissing", allowMissing)
+ .add("fieldMask", fieldMask)
+ .add("setting", setting)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java
index 3d85aeec4..756895733 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java
@@ -24,6 +24,10 @@ public AlertsV2API(AlertsV2Service mock) {
impl = mock;
}
+ public AlertV2 createAlert(AlertV2 alert) {
+ return createAlert(new CreateAlertV2Request().setAlert(alert));
+ }
+
/**
* Create an alert.
*
@@ -80,8 +84,9 @@ public void trashAlert(TrashAlertV2Request request) {
impl.trashAlert(request);
}
- public AlertV2 updateAlert(String id, String updateMask) {
- return updateAlert(new UpdateAlertV2Request().setId(id).setUpdateMask(updateMask));
+ public AlertV2 updateAlert(String id, AlertV2 alert, String updateMask) {
+ return updateAlert(
+ new UpdateAlertV2Request().setId(id).setAlert(alert).setUpdateMask(updateMask));
}
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java
index 42c268b80..b8379503e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java
@@ -20,7 +20,7 @@ public AlertsV2Impl(ApiClient apiClient) {
public AlertV2 createAlert(CreateAlertV2Request request) {
String path = "/api/2.0/alerts";
try {
- Request req = new Request("POST", path, apiClient.serialize(request));
+ Request req = new Request("POST", path, apiClient.serialize(request.getAlert()));
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
@@ -73,7 +73,7 @@ public void trashAlert(TrashAlertV2Request request) {
public AlertV2 updateAlert(UpdateAlertV2Request request) {
String path = String.format("/api/2.0/alerts/%s", request.getId());
try {
- Request req = new Request("PATCH", path, apiClient.serialize(request));
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getAlert()));
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java
index e9dce84d6..10b1698ab 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Create an alert */
@Generated
public class CreateAlertV2Request {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java
index a1df8b791..1e43290d8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java
@@ -3,11 +3,13 @@
package com.databricks.sdk.service.sql;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Update an alert */
@Generated
public class UpdateAlertV2Request {
/** */
@@ -28,7 +30,8 @@ public class UpdateAlertV2Request {
* the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
* the API changes in the future.
*/
- @JsonProperty("update_mask")
+ @JsonIgnore
+ @QueryParam("update_mask")
private String updateMask;
public UpdateAlertV2Request setAlert(AlertV2 alert) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java
index a7dd16a46..f26fda411 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java
@@ -8,13 +8,6 @@
import java.util.Collection;
import java.util.Objects;
-/**
- * copied from proto3 / Google Well Known Types, source:
- * https://github.com/protocolbuffers/protobuf/blob/450d24ca820750c5db5112a6f0b0c2efb9758021/src/google/protobuf/struct.proto
- * `ListValue` is a wrapper around a repeated field of values.
- *
- * The JSON representation for `ListValue` is JSON array.
- */
@Generated
public class ListValue {
/** Repeated field of dynamically typed values. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java
index c7bda63c5..7e8e17153 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java
@@ -8,16 +8,6 @@
import java.util.Collection;
import java.util.Objects;
-/**
- * copied from proto3 / Google Well Known Types, source:
- * https://github.com/protocolbuffers/protobuf/blob/450d24ca820750c5db5112a6f0b0c2efb9758021/src/google/protobuf/struct.proto
- * `Struct` represents a structured data value, consisting of fields which map to dynamically typed
- * values. In some languages, `Struct` might be supported by a native representation. For example,
- * in scripting languages like JS a struct is represented as an object. The details of that
- * representation are described together with the proto support for the language.
- *
- * The JSON representation for `Struct` is JSON object.
- */
@Generated
public class Struct {
/** Data entry, corresponding to a row in a vector index. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java
index 2ab3764de..2c50b490a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java
@@ -13,13 +13,7 @@ public class Value {
@JsonProperty("bool_value")
private Boolean boolValue;
- /**
- * copied from proto3 / Google Well Known Types, source:
- * https://github.com/protocolbuffers/protobuf/blob/450d24ca820750c5db5112a6f0b0c2efb9758021/src/google/protobuf/struct.proto
- * `ListValue` is a wrapper around a repeated field of values.
- *
- * The JSON representation for `ListValue` is JSON array.
- */
+ /** */
@JsonProperty("list_value")
private ListValue listValue;
@@ -31,16 +25,7 @@ public class Value {
@JsonProperty("string_value")
private String stringValue;
- /**
- * copied from proto3 / Google Well Known Types, source:
- * https://github.com/protocolbuffers/protobuf/blob/450d24ca820750c5db5112a6f0b0c2efb9758021/src/google/protobuf/struct.proto
- * `Struct` represents a structured data value, consisting of fields which map to dynamically
- * typed values. In some languages, `Struct` might be supported by a native representation. For
- * example, in scripting languages like JS a struct is represented as an object. The details of
- * that representation are described together with the proto support for the language.
- *
- * The JSON representation for `Struct` is JSON object.
- */
+ /** */
@JsonProperty("struct_value")
private Struct structValue;