diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 3b0b1fdac..864d90a5f 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -d4c86c045ee9d0410a41ef07e8ae708673b95fa1 \ No newline at end of file +6b2dbf5489ec706709fed80ee65caed7d10a2f38 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index dc8748297..c27c40bc7 100755 --- a/.gitattributes +++ b/.gitattributes @@ -197,6 +197,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousU databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java linguist-generated=true @@ -212,6 +214,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegis databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java linguist-generated=true @@ -221,8 +224,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialV databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CurrentWorkspaceBindings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java linguist-generated=true @@ -235,6 +243,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatal databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java linguist-generated=true @@ -245,6 +257,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegis databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java linguist-generated=true @@ -268,6 +282,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLoc databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java linguist-generated=true @@ -293,11 +308,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountM databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java linguist-generated=true @@ -314,8 +331,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshR databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java linguist-generated=true @@ -330,6 +349,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnect databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java linguist-generated=true @@ -384,6 +405,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefr databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicy.java linguist-generated=true @@ -432,8 +454,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCred databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfoState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java linguist-generated=true @@ -460,10 +484,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUp databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java linguist-generated=true @@ -480,6 +505,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTable databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java linguist-generated=true @@ -497,7 +523,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBi databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoom.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAccessRestricted.java linguist-generated=true @@ -742,7 +767,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStat databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibrary.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteCluster.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponse.java linguist-generated=true @@ -1019,7 +1043,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionOutpu databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescription.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RequestAuthzIdentity.java linguist-generated=true @@ -1033,6 +1056,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipa databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateRuleSetRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java linguist-generated=true @@ -1682,6 +1707,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdate databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelineEventsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ListPipelinesRequest.java linguist-generated=true @@ -1693,6 +1719,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/MaturityL databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibrary.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Notifications.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineCluster.java linguist-generated=true @@ -1840,6 +1867,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRe databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessageRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java linguist-generated=true @@ -1882,6 +1910,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfi databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PaLmConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PatchServingEndpointTags.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java linguist-generated=true @@ -1921,6 +1951,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndp databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingModelWorkloadType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/V1ResponseChoiceElement.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java linguist-generated=true @@ -1998,6 +2029,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisa databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNotificationDestinationRequest.java linguist-generated=true @@ -2081,6 +2114,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEsmEnab databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPersonalComputeSettingRequest.java linguist-generated=true @@ -2106,6 +2142,18 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublic databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/MicrosoftTeamsConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java linguist-generated=true @@ -2178,6 +2226,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnab databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java 
linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 5c6bef74c..98c34d24c 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -12,3 +12,40 @@ * Capture DatabricksError when retrying API calls ([#427](https://github.com/databricks/databricks-sdk-java/pull/427)).
### API Changes
+* Added `accountClient.llmProxyPartnerPoweredAccount()` service, `accountClient.llmProxyPartnerPoweredEnforce()` service and `workspaceClient.llmProxyPartnerPoweredWorkspace()` service.
+* Added `workspaceClient.databaseInstances()` service.
+* Added `createProvisionedThroughputEndpoint()` and `updateProvisionedThroughputEndpointConfig()` methods for `workspaceClient.servingEndpoints()` service.
+* Added `catalogName` field for `com.databricks.sdk.service.catalog.EnableRequest`.
+* Added `sourceType` field for `com.databricks.sdk.service.pipelines.IngestionPipelineDefinition`.
+* Added `glob` field for `com.databricks.sdk.service.pipelines.PipelineLibrary`.
+* Added `provisionedModelUnits` field for `com.databricks.sdk.service.serving.ServedEntityInput`.
+* Added `provisionedModelUnits` field for `com.databricks.sdk.service.serving.ServedEntityOutput`.
+* Added `provisionedModelUnits` field for `com.databricks.sdk.service.serving.ServedModelInput`.
+* Added `provisionedModelUnits` field for `com.databricks.sdk.service.serving.ServedModelOutput`.
+* Added `DESCRIBE_QUERY_INVALID_SQL_ERROR`, `DESCRIBE_QUERY_TIMEOUT`, `DESCRIBE_QUERY_UNEXPECTED_FAILURE`, `INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION`, `INVALID_SQL_MULTIPLE_DATASET_REFERENCES_EXCEPTION`, `INVALID_SQL_MULTIPLE_STATEMENTS_EXCEPTION` and `INVALID_SQL_UNKNOWN_TABLE_EXCEPTION` enum values for `com.databricks.sdk.service.dashboards.MessageErrorType`.
+* Added `CAN_CREATE` and `CAN_MONITOR_ONLY` enum values for `com.databricks.sdk.service.iam.PermissionLevel`.
+* Added `SUCCESS_WITH_FAILURES` enum value for `com.databricks.sdk.service.jobs.TerminationCodeCode`.
+* Added `INFRASTRUCTURE_MAINTENANCE` enum value for `com.databricks.sdk.service.pipelines.StartUpdateCause`.
+* Added `INFRASTRUCTURE_MAINTENANCE` enum value for `com.databricks.sdk.service.pipelines.UpdateInfoCause`.
+* [Breaking] Changed `createAlert()` and `updateAlert()` methods for `workspaceClient.alertsV2()` service with new required argument order.
+* [Breaking] Changed `set()` method for `workspaceClient.permissions()` service. New request type is `com.databricks.sdk.service.iam.SetObjectPermissions` class.
+* [Breaking] Changed `update()` method for `workspaceClient.permissions()` service. New request type is `com.databricks.sdk.service.iam.UpdateObjectPermissions` class.
+* [Breaking] Changed `get()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.GetCatalogWorkspaceBindingsResponse` class.
+* [Breaking] Changed `getBindings()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.GetWorkspaceBindingsResponse` class.
+* [Breaking] Changed `update()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.UpdateCatalogWorkspaceBindingsResponse` class.
+* [Breaking] Changed `updateBindings()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.UpdateWorkspaceBindingsResponse` class.
+* [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.GetBindingsRequest` to type `String` class.
+* Changed `schema` and `state` fields for `com.databricks.sdk.service.catalog.SystemSchemaInfo` to be required.
+* [Breaking] Changed `state` field for `com.databricks.sdk.service.catalog.SystemSchemaInfo` to type `String` class.
+* [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.UpdateWorkspaceBindingsParameters` to type `String` class.
+* [Breaking] Changed `workspaceId` field for `com.databricks.sdk.service.catalog.WorkspaceBinding` to be required.
+* [Breaking] Changed `gpuNodePoolId` field for `com.databricks.sdk.service.jobs.ComputeConfig` to no longer be required.
+* [Breaking] Changed `alert` field for `com.databricks.sdk.service.sql.CreateAlertV2Request` to be required.
+* [Breaking] Changed `alert` field for `com.databricks.sdk.service.sql.UpdateAlertV2Request` to be required.
+* [Breaking] Removed `nodeTypeFlexibility` field for `com.databricks.sdk.service.compute.EditInstancePool`.
+* [Breaking] Removed `nodeTypeFlexibility` field for `com.databricks.sdk.service.compute.GetInstancePool`.
+* [Breaking] Removed `nodeTypeFlexibility` field for `com.databricks.sdk.service.compute.InstancePoolAndStats`.
+* [Breaking] Removed `CATALOG`, `CREDENTIAL`, `EXTERNAL_LOCATION` and `STORAGE_CREDENTIAL` enum values for `com.databricks.sdk.service.catalog.GetBindingsSecurableType`.
+* [Breaking] Removed `AVAILABLE`, `DISABLE_INITIALIZED`, `ENABLE_COMPLETED`, `ENABLE_INITIALIZED` and `UNAVAILABLE` enum values for `com.databricks.sdk.service.catalog.SystemSchemaInfoState`.
+* [Breaking] Removed `CATALOG`, `CREDENTIAL`, `EXTERNAL_LOCATION` and `STORAGE_CREDENTIAL` enum values for `com.databricks.sdk.service.catalog.UpdateBindingsSecurableType`.
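The two permissions entries above replace the old shared `PermissionsRequest` with dedicated request classes. A minimal before/after sketch, assuming `SetObjectPermissions` keeps the same setters the removed `PermissionsRequest` exposed; the object type, object ID, and group name below are illustrative:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.iam.AccessControlRequest;
import com.databricks.sdk.service.iam.PermissionLevel;
import com.databricks.sdk.service.iam.SetObjectPermissions;
import java.util.Arrays;

public class PermissionsMigration {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Before: w.permissions().set(new PermissionsRequest()...); PermissionsRequest is removed in this change.
    // After: build the dedicated SetObjectPermissions request; update() takes UpdateObjectPermissions analogously.
    w.permissions()
        .set(
            new SetObjectPermissions()
                .setRequestObjectType("clusters") // illustrative object type
                .setRequestObjectId("0123-456789-abcdefgh") // hypothetical cluster ID
                .setAccessControlList(
                    Arrays.asList(
                        new AccessControlRequest()
                            .setGroupName("data-engineers") // hypothetical group
                            .setPermissionLevel(PermissionLevel.CAN_RESTART))));
  }
}
```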
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 8e3a89c79..cc1c270b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -18,6 +18,8 @@ import com.databricks.sdk.service.catalog.ConnectionsService; import com.databricks.sdk.service.catalog.CredentialsAPI; import com.databricks.sdk.service.catalog.CredentialsService; +import com.databricks.sdk.service.catalog.DatabaseInstancesAPI; +import com.databricks.sdk.service.catalog.DatabaseInstancesService; import com.databricks.sdk.service.catalog.ExternalLocationsAPI; import com.databricks.sdk.service.catalog.ExternalLocationsService; import com.databricks.sdk.service.catalog.FunctionsAPI; @@ -237,6 +239,7 @@ public class WorkspaceClient { private DashboardWidgetsAPI dashboardWidgetsAPI; private DashboardsAPI dashboardsAPI; private DataSourcesAPI dataSourcesAPI; + private DatabaseInstancesAPI databaseInstancesAPI; private DbfsExt dbfsAPI; private DbsqlPermissionsAPI dbsqlPermissionsAPI; private ExperimentsAPI experimentsAPI; @@ -345,6 +348,7 @@ public WorkspaceClient(DatabricksConfig config) { dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient); dashboardsAPI = new DashboardsAPI(apiClient); dataSourcesAPI = new DataSourcesAPI(apiClient); + databaseInstancesAPI = new DatabaseInstancesAPI(apiClient); dbfsAPI = new DbfsExt(apiClient); dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient); experimentsAPI = new ExperimentsAPI(apiClient); @@ -701,6 +705,11 @@ public DataSourcesAPI dataSources() { return dataSourcesAPI; } + /** Database Instances provide access to a database via REST API or direct SQL. */ + public DatabaseInstancesAPI databaseInstances() { + return databaseInstancesAPI; + } + /** * DBFS API makes it simple to interact with various data sources without having to include a * users credentials every time to read a file. @@ -1044,52 +1053,27 @@ public PermissionMigrationAPI permissionMigration() { /** * Permissions API are used to create read, write, edit, update and manage access for various - * users on different objects and endpoints. - * - *

* **[Apps permissions](:service:apps)** — Manage which users can manage or use apps. - * - *

* **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or - * attach to clusters. - * - *

* **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users can use - * cluster policies. - * - *

* **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can - * view, manage, run, cancel, or own a Delta Live Tables pipeline. - * - *

* **[Job permissions](:service:jobs)** — Manage which users can view, manage, trigger, - * cancel, or own a job. - * - *

* **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, - * edit, or manage MLflow experiments. - * - *

* **[MLflow registered model permissions](:service:modelregistry)** — Manage which users can - * read, edit, or manage MLflow registered models. - * - *

* **[Password permissions](:service:users)** — Manage which users can use password login - * when SSO is enabled. - * - *

* **[Instance Pool permissions](:service:instancepools)** — Manage which users can manage or - * attach to pools. - * - *

* **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo. - * - *

* **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can - * view, query, or manage a serving endpoint. - * - *

* **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or - * manage SQL warehouses. - * - *

* **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use - * tokens. - * - *

* **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, - * edit, or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. - * - *

For the mapping of the required permissions for specific actions or abilities and other - * important information, see [Access Control]. - * - *

Note that to manage access control on service principals, use **[Account Access Control + * users on different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage + * which users can manage or use apps. * **[Cluster permissions](:service:clusters)** — Manage + * which users can manage, restart, or attach to clusters. * **[Cluster policy + * permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. * + * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view, + * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job + * permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a + * job. * **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, + * edit, or manage MLflow experiments. * **[MLflow registered model + * permissions](:service:modelregistry)** — Manage which users can read, edit, or manage MLflow + * registered models. * **[Instance Pool permissions](:service:instancepools)** — Manage which + * users can manage or attach to pools. * **[Repo permissions](repos)** — Manage which users can + * read, run, edit, or manage a repo. * **[Serving endpoint + * permissions](:service:servingendpoints)** — Manage which users can view, query, or manage a + * serving endpoint. * **[SQL warehouse permissions](:service:warehouses)** — Manage which users + * can use or manage SQL warehouses. * **[Token permissions](:service:tokenmanagement)** — Manage + * which users can create or use tokens. * **[Workspace object permissions](:service:workspace)** + * — Manage which users can read, run, edit, or manage alerts, dbsql-dashboards, directories, + * files, notebooks and queries. For the mapping of the required permissions for specific actions + * or abilities and other important information, see [Access Control]. Note that to manage access + * control on service principals, use **[Account Access Control * Proxy](:service:accountaccesscontrolproxy)**. * *

[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html @@ -1729,7 +1713,8 @@ public WorkspaceAPI workspace() { * Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) * which introduces the ability to bind a securable in READ_ONLY mode (catalogs only). * - *

Securable types that support binding: - catalog - storage_credential - external_location + *

Securable types that support binding: - catalog - storage_credential - credential - + * external_location */ public WorkspaceBindingsAPI workspaceBindings() { return workspaceBindingsAPI; @@ -2040,6 +2025,17 @@ public WorkspaceClient withDataSourcesAPI(DataSourcesAPI dataSources) { return this; } + /** Replace the default DatabaseInstancesService with a custom implementation. */ + public WorkspaceClient withDatabaseInstancesImpl(DatabaseInstancesService databaseInstances) { + return this.withDatabaseInstancesAPI(new DatabaseInstancesAPI(databaseInstances)); + } + + /** Replace the default DatabaseInstancesAPI with a custom implementation. */ + public WorkspaceClient withDatabaseInstancesAPI(DatabaseInstancesAPI databaseInstances) { + this.databaseInstancesAPI = databaseInstances; + return this; + } + /** Replace the default DbfsService with a custom implementation. */ public WorkspaceClient withDbfsImpl(DbfsService dbfs) { return this.withDbfsAPI(new DbfsExt(dbfs)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java new file mode 100755 index 000000000..d58ef38df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a Database Catalog */ +@Generated +public class CreateDatabaseCatalogRequest { + /** */ + @JsonProperty("catalog") + private DatabaseCatalog catalog; + + public CreateDatabaseCatalogRequest setCatalog(DatabaseCatalog catalog) { + this.catalog = catalog; + return this; + } + + public DatabaseCatalog getCatalog() { + return catalog; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDatabaseCatalogRequest that = (CreateDatabaseCatalogRequest) o; + return Objects.equals(catalog, that.catalog); + } + + @Override + public int hashCode() { + return Objects.hash(catalog); + } + + @Override + public String toString() { + return new ToStringer(CreateDatabaseCatalogRequest.class).add("catalog", catalog).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java new file mode 100755 index 000000000..2aa9d2a71 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java @@ -0,0 +1,48 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a Database Instance */ +@Generated +public class CreateDatabaseInstanceRequest { + /** + * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and + * storage. 
+ */ + @JsonProperty("database_instance") + private DatabaseInstance databaseInstance; + + public CreateDatabaseInstanceRequest setDatabaseInstance(DatabaseInstance databaseInstance) { + this.databaseInstance = databaseInstance; + return this; + } + + public DatabaseInstance getDatabaseInstance() { + return databaseInstance; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDatabaseInstanceRequest that = (CreateDatabaseInstanceRequest) o; + return Objects.equals(databaseInstance, that.databaseInstance); + } + + @Override + public int hashCode() { + return Objects.hash(databaseInstance); + } + + @Override + public String toString() { + return new ToStringer(CreateDatabaseInstanceRequest.class) + .add("databaseInstance", databaseInstance) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java new file mode 100755 index 000000000..2c1ea4700 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a Synced Database Table */ +@Generated +public class CreateSyncedDatabaseTableRequest { + /** Next field marker: 10 */ + @JsonProperty("synced_table") + private SyncedDatabaseTable syncedTable; + + public CreateSyncedDatabaseTableRequest setSyncedTable(SyncedDatabaseTable syncedTable) { + this.syncedTable = syncedTable; + return this; + } + + public SyncedDatabaseTable getSyncedTable() { + return syncedTable; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSyncedDatabaseTableRequest that = (CreateSyncedDatabaseTableRequest) o; + return Objects.equals(syncedTable, that.syncedTable); + } + + @Override + public int hashCode() { + return Objects.hash(syncedTable); + } + + @Override + public String toString() { + return new ToStringer(CreateSyncedDatabaseTableRequest.class) + .add("syncedTable", syncedTable) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java new file mode 100755 index 000000000..b4d1c2d57 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java @@ -0,0 +1,104 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DatabaseCatalog { + /** */ + @JsonProperty("create_database_if_not_exists") + private Boolean createDatabaseIfNotExists; + + /** The name of the DatabaseInstance housing the database. 
*/ + @JsonProperty("database_instance_name") + private String databaseInstanceName; + + /** The name of the database (in an instance) associated with the catalog. */ + @JsonProperty("database_name") + private String databaseName; + + /** The name of the catalog in UC. */ + @JsonProperty("name") + private String name; + + /** */ + @JsonProperty("uid") + private String uid; + + public DatabaseCatalog setCreateDatabaseIfNotExists(Boolean createDatabaseIfNotExists) { + this.createDatabaseIfNotExists = createDatabaseIfNotExists; + return this; + } + + public Boolean getCreateDatabaseIfNotExists() { + return createDatabaseIfNotExists; + } + + public DatabaseCatalog setDatabaseInstanceName(String databaseInstanceName) { + this.databaseInstanceName = databaseInstanceName; + return this; + } + + public String getDatabaseInstanceName() { + return databaseInstanceName; + } + + public DatabaseCatalog setDatabaseName(String databaseName) { + this.databaseName = databaseName; + return this; + } + + public String getDatabaseName() { + return databaseName; + } + + public DatabaseCatalog setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DatabaseCatalog setUid(String uid) { + this.uid = uid; + return this; + } + + public String getUid() { + return uid; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseCatalog that = (DatabaseCatalog) o; + return Objects.equals(createDatabaseIfNotExists, that.createDatabaseIfNotExists) + && Objects.equals(databaseInstanceName, that.databaseInstanceName) + && Objects.equals(databaseName, that.databaseName) + && Objects.equals(name, that.name) + && Objects.equals(uid, that.uid); + } + + @Override + public int hashCode() { + return Objects.hash(createDatabaseIfNotExists, databaseInstanceName, databaseName, name, uid); + } + + @Override + public String toString() { + return new ToStringer(DatabaseCatalog.class) + .add("createDatabaseIfNotExists", createDatabaseIfNotExists) + .add("databaseInstanceName", databaseInstanceName) + .add("databaseName", databaseName) + .add("name", name) + .add("uid", uid) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java new file mode 100755 index 000000000..ce72b3cba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java @@ -0,0 +1,208 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. + */ +@Generated +public class DatabaseInstance { + /** Password for admin user to create. If not provided, no user will be created. */ + @JsonProperty("admin_password") + private String adminPassword; + + /** Name of the admin role for the instance. If not provided, defaults to 'databricks_admin'. */ + @JsonProperty("admin_rolename") + private String adminRolename; + + /** The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4".
*/ + @JsonProperty("capacity") + private String capacity; + + /** The timestamp when the instance was created. */ + @JsonProperty("creation_time") + private String creationTime; + + /** The email of the creator of the instance. */ + @JsonProperty("creator") + private String creator; + + /** The name of the instance. This is the unique identifier for the instance. */ + @JsonProperty("name") + private String name; + + /** The version of Postgres running on the instance. */ + @JsonProperty("pg_version") + private String pgVersion; + + /** The DNS endpoint to connect to the instance for read+write access. */ + @JsonProperty("read_write_dns") + private String readWriteDns; + + /** The current state of the instance. */ + @JsonProperty("state") + private DatabaseInstanceState state; + + /** Whether the instance is stopped. */ + @JsonProperty("stopped") + private Boolean stopped; + + /** An immutable UUID identifier for the instance. */ + @JsonProperty("uid") + private String uid; + + public DatabaseInstance setAdminPassword(String adminPassword) { + this.adminPassword = adminPassword; + return this; + } + + public String getAdminPassword() { + return adminPassword; + } + + public DatabaseInstance setAdminRolename(String adminRolename) { + this.adminRolename = adminRolename; + return this; + } + + public String getAdminRolename() { + return adminRolename; + } + + public DatabaseInstance setCapacity(String capacity) { + this.capacity = capacity; + return this; + } + + public String getCapacity() { + return capacity; + } + + public DatabaseInstance setCreationTime(String creationTime) { + this.creationTime = creationTime; + return this; + } + + public String getCreationTime() { + return creationTime; + } + + public DatabaseInstance setCreator(String creator) { + this.creator = creator; + return this; + } + + public String getCreator() { + return creator; + } + + public DatabaseInstance setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DatabaseInstance setPgVersion(String pgVersion) { + this.pgVersion = pgVersion; + return this; + } + + public String getPgVersion() { + return pgVersion; + } + + public DatabaseInstance setReadWriteDns(String readWriteDns) { + this.readWriteDns = readWriteDns; + return this; + } + + public String getReadWriteDns() { + return readWriteDns; + } + + public DatabaseInstance setState(DatabaseInstanceState state) { + this.state = state; + return this; + } + + public DatabaseInstanceState getState() { + return state; + } + + public DatabaseInstance setStopped(Boolean stopped) { + this.stopped = stopped; + return this; + } + + public Boolean getStopped() { + return stopped; + } + + public DatabaseInstance setUid(String uid) { + this.uid = uid; + return this; + } + + public String getUid() { + return uid; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseInstance that = (DatabaseInstance) o; + return Objects.equals(adminPassword, that.adminPassword) + && Objects.equals(adminRolename, that.adminRolename) + && Objects.equals(capacity, that.capacity) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(creator, that.creator) + && Objects.equals(name, that.name) + && Objects.equals(pgVersion, that.pgVersion) + && Objects.equals(readWriteDns, that.readWriteDns) + && Objects.equals(state, that.state) + && Objects.equals(stopped, that.stopped) + && Objects.equals(uid, that.uid); + } + + 
@Override + public int hashCode() { + return Objects.hash( + adminPassword, + adminRolename, + capacity, + creationTime, + creator, + name, + pgVersion, + readWriteDns, + state, + stopped, + uid); + } + + @Override + public String toString() { + return new ToStringer(DatabaseInstance.class) + .add("adminPassword", adminPassword) + .add("adminRolename", adminRolename) + .add("capacity", capacity) + .add("creationTime", creationTime) + .add("creator", creator) + .add("name", name) + .add("pgVersion", pgVersion) + .add("readWriteDns", readWriteDns) + .add("state", state) + .add("stopped", stopped) + .add("uid", uid) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java new file mode 100755 index 000000000..909921d03 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum DatabaseInstanceState { + AVAILABLE, + DELETING, + FAILING_OVER, + STARTING, + STOPPED, + UPDATING, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java new file mode 100755 index 000000000..c91d638d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java @@ -0,0 +1,147 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Database Instances provide access to a database via REST API or direct SQL. */ +@Generated +public class DatabaseInstancesAPI { + private static final Logger LOG = LoggerFactory.getLogger(DatabaseInstancesAPI.class); + + private final DatabaseInstancesService impl; + + /** Regular-use constructor */ + public DatabaseInstancesAPI(ApiClient apiClient) { + impl = new DatabaseInstancesImpl(apiClient); + } + + /** Constructor for mocks */ + public DatabaseInstancesAPI(DatabaseInstancesService mock) { + impl = mock; + } + + public DatabaseCatalog createDatabaseCatalog(DatabaseCatalog catalog) { + return createDatabaseCatalog(new CreateDatabaseCatalogRequest().setCatalog(catalog)); + } + + /** Create a Database Catalog. */ + public DatabaseCatalog createDatabaseCatalog(CreateDatabaseCatalogRequest request) { + return impl.createDatabaseCatalog(request); + } + + public DatabaseInstance createDatabaseInstance(DatabaseInstance databaseInstance) { + return createDatabaseInstance( + new CreateDatabaseInstanceRequest().setDatabaseInstance(databaseInstance)); + } + + /** Create a Database Instance. */ + public DatabaseInstance createDatabaseInstance(CreateDatabaseInstanceRequest request) { + return impl.createDatabaseInstance(request); + } + + public SyncedDatabaseTable createSyncedDatabaseTable(SyncedDatabaseTable syncedTable) { + return createSyncedDatabaseTable( + new CreateSyncedDatabaseTableRequest().setSyncedTable(syncedTable)); + } + + /** Create a Synced Database Table. 
*/
+  public SyncedDatabaseTable createSyncedDatabaseTable(CreateSyncedDatabaseTableRequest request) {
+    return impl.createSyncedDatabaseTable(request);
+  }
+
+  public void deleteDatabaseCatalog(String name) {
+    deleteDatabaseCatalog(new DeleteDatabaseCatalogRequest().setName(name));
+  }
+
+  /** Delete a Database Catalog. */
+  public void deleteDatabaseCatalog(DeleteDatabaseCatalogRequest request) {
+    impl.deleteDatabaseCatalog(request);
+  }
+
+  public void deleteDatabaseInstance(String name) {
+    deleteDatabaseInstance(new DeleteDatabaseInstanceRequest().setName(name));
+  }
+
+  /** Delete a Database Instance. */
+  public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) {
+    impl.deleteDatabaseInstance(request);
+  }
+
+  public void deleteSyncedDatabaseTable(String name) {
+    deleteSyncedDatabaseTable(new DeleteSyncedDatabaseTableRequest().setName(name));
+  }
+
+  /** Delete a Synced Database Table. */
+  public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request) {
+    impl.deleteSyncedDatabaseTable(request);
+  }
+
+  /** Find a Database Instance by uid. */
+  public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidRequest request) {
+    return impl.findDatabaseInstanceByUid(request);
+  }
+
+  public DatabaseCatalog getDatabaseCatalog(String name) {
+    return getDatabaseCatalog(new GetDatabaseCatalogRequest().setName(name));
+  }
+
+  /** Get a Database Catalog. */
+  public DatabaseCatalog getDatabaseCatalog(GetDatabaseCatalogRequest request) {
+    return impl.getDatabaseCatalog(request);
+  }
+
+  public DatabaseInstance getDatabaseInstance(String name) {
+    return getDatabaseInstance(new GetDatabaseInstanceRequest().setName(name));
+  }
+
+  /** Get a Database Instance. */
+  public DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest request) {
+    return impl.getDatabaseInstance(request);
+  }
+
+  public SyncedDatabaseTable getSyncedDatabaseTable(String name) {
+    return getSyncedDatabaseTable(new GetSyncedDatabaseTableRequest().setName(name));
+  }
+
+  /** Get a Synced Database Table. */
+  public SyncedDatabaseTable getSyncedDatabaseTable(GetSyncedDatabaseTableRequest request) {
+    return impl.getSyncedDatabaseTable(request);
+  }
+
+  /** List Database Instances. */
+  public Iterable<DatabaseInstance> listDatabaseInstances(ListDatabaseInstancesRequest request) {
+    return new Paginator<>(
+        request,
+        impl::listDatabaseInstances,
+        ListDatabaseInstancesResponse::getDatabaseInstances,
+        response -> {
+          String token = response.getNextPageToken();
+          if (token == null || token.isEmpty()) {
+            return null;
+          }
+          return request.setPageToken(token);
+        });
+  }
+
+  public DatabaseInstance updateDatabaseInstance(
+      String name, DatabaseInstance databaseInstance, String updateMask) {
+    return updateDatabaseInstance(
+        new UpdateDatabaseInstanceRequest()
+            .setName(name)
+            .setDatabaseInstance(databaseInstance)
+            .setUpdateMask(updateMask));
+  }
+
+  /** Update a Database Instance.
*/ + public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest request) { + return impl.updateDatabaseInstance(request); + } + + public DatabaseInstancesService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java new file mode 100755 index 000000000..bea3e0708 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java @@ -0,0 +1,178 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of DatabaseInstances */ +@Generated +class DatabaseInstancesImpl implements DatabaseInstancesService { + private final ApiClient apiClient; + + public DatabaseInstancesImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public DatabaseCatalog createDatabaseCatalog(CreateDatabaseCatalogRequest request) { + String path = "/api/2.0/database/catalogs"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getCatalog())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseCatalog.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseInstance createDatabaseInstance(CreateDatabaseInstanceRequest request) { + String path = "/api/2.0/database/instances"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getDatabaseInstance())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseInstance.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public SyncedDatabaseTable createSyncedDatabaseTable(CreateSyncedDatabaseTableRequest request) { + String path = "/api/2.0/database/synced_tables"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getSyncedTable())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, SyncedDatabaseTable.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteDatabaseCatalog(DeleteDatabaseCatalogRequest request) { + String path = String.format("/api/2.0/database/catalogs/%s", request.getName()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteDatabaseCatalogResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) { + String path = String.format("/api/2.0/database/instances/%s", request.getName()); + try { + Request req = new 
Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteDatabaseInstanceResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request) { + String path = String.format("/api/2.0/database/synced_tables/%s", request.getName()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteSyncedDatabaseTableResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidRequest request) { + String path = "/api/2.0/database/instances:findByUid"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DatabaseInstance.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseCatalog getDatabaseCatalog(GetDatabaseCatalogRequest request) { + String path = String.format("/api/2.0/database/catalogs/%s", request.getName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DatabaseCatalog.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest request) { + String path = String.format("/api/2.0/database/instances/%s", request.getName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DatabaseInstance.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public SyncedDatabaseTable getSyncedDatabaseTable(GetSyncedDatabaseTableRequest request) { + String path = String.format("/api/2.0/database/synced_tables/%s", request.getName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, SyncedDatabaseTable.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListDatabaseInstancesResponse listDatabaseInstances(ListDatabaseInstancesRequest request) { + String path = "/api/2.0/database/instances"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListDatabaseInstancesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest request) { + String path = String.format("/api/2.0/database/instances/%s", request.getName()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getDatabaseInstance())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", 
"application/json"); + return apiClient.execute(req, DatabaseInstance.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java new file mode 100755 index 000000000..9bf012769 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** + * Database Instances provide access to a database via REST API or direct SQL. + * + *

<p>This is the high-level interface that contains generated methods.
+ *
+ *

<p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface DatabaseInstancesService {
+  /** Create a Database Catalog. */
+  DatabaseCatalog createDatabaseCatalog(CreateDatabaseCatalogRequest createDatabaseCatalogRequest);
+
+  /** Create a Database Instance. */
+  DatabaseInstance createDatabaseInstance(
+      CreateDatabaseInstanceRequest createDatabaseInstanceRequest);
+
+  /** Create a Synced Database Table. */
+  SyncedDatabaseTable createSyncedDatabaseTable(
+      CreateSyncedDatabaseTableRequest createSyncedDatabaseTableRequest);
+
+  /** Delete a Database Catalog. */
+  void deleteDatabaseCatalog(DeleteDatabaseCatalogRequest deleteDatabaseCatalogRequest);
+
+  /** Delete a Database Instance. */
+  void deleteDatabaseInstance(DeleteDatabaseInstanceRequest deleteDatabaseInstanceRequest);
+
+  /** Delete a Synced Database Table. */
+  void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest deleteSyncedDatabaseTableRequest);
+
+  /** Find a Database Instance by uid. */
+  DatabaseInstance findDatabaseInstanceByUid(
+      FindDatabaseInstanceByUidRequest findDatabaseInstanceByUidRequest);
+
+  /** Get a Database Catalog. */
+  DatabaseCatalog getDatabaseCatalog(GetDatabaseCatalogRequest getDatabaseCatalogRequest);
+
+  /** Get a Database Instance. */
+  DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest getDatabaseInstanceRequest);
+
+  /** Get a Synced Database Table. */
+  SyncedDatabaseTable getSyncedDatabaseTable(
+      GetSyncedDatabaseTableRequest getSyncedDatabaseTableRequest);
+
+  /** List Database Instances. */
+  ListDatabaseInstancesResponse listDatabaseInstances(
+      ListDatabaseInstancesRequest listDatabaseInstancesRequest);
+
+  /** Update a Database Instance. */
+  DatabaseInstance updateDatabaseInstance(
+      UpdateDatabaseInstanceRequest updateDatabaseInstanceRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java
new file mode 100755
index 000000000..3a455fea8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
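// ---------------------------------------------------------------------------
// [Editor's note] A minimal usage sketch for the new Database Instances API,
// illustration only and not part of the generated patch. The
// DatabaseInstancesAPI(ApiClient) constructor and the DatabaseInstance /
// DatabaseCatalog builders all appear earlier in this diff; the configured
// `apiClient` (workspace host + auth) is assumed.
//
//   ApiClient apiClient = ...; // assumed to be configured
//   DatabaseInstancesAPI api = new DatabaseInstancesAPI(apiClient);
//
//   // Provision a Postgres instance, then expose it in UC as a catalog.
//   DatabaseInstance instance =
//       api.createDatabaseInstance(
//           new DatabaseInstance().setName("my-instance").setCapacity("CU_1"));
//
//   DatabaseCatalog catalog =
//       api.createDatabaseCatalog(
//           new DatabaseCatalog()
//               .setName("my_database_catalog")              // catalog name in UC
//               .setDatabaseInstanceName(instance.getName())
//               .setDatabaseName("my_database")              // database inside the instance
//               .setCreateDatabaseIfNotExists(true));
// ---------------------------------------------------------------------------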
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete a Database Catalog */
+@Generated
+public class DeleteDatabaseCatalogRequest {
+  /** */
+  @JsonIgnore private String name;
+
+  public DeleteDatabaseCatalogRequest setName(String name) {
+    this.name = name;
+    return this;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    DeleteDatabaseCatalogRequest that = (DeleteDatabaseCatalogRequest) o;
+    return Objects.equals(name, that.name);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(DeleteDatabaseCatalogRequest.class).add("name", name).toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java
similarity index 56%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java
index 7366ed43d..17de1764a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java
@@ -1,18 +1,13 @@
 // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

-package com.databricks.sdk.service.compute;
+package com.databricks.sdk.service.catalog;

 import com.databricks.sdk.support.Generated;
 import com.databricks.sdk.support.ToStringer;
 import java.util.Objects;

-/**
- * For Fleet-V2 using classic clusters, this object contains the information about the alternate
- * node type ids to use when attempting to launch a cluster. It can be used with both the driver and
- * worker node types.
- */
 @Generated
-public class NodeTypeFlexibility {
+public class DeleteDatabaseCatalogResponse {

   @Override
   public boolean equals(Object o) {
@@ -28,6 +23,6 @@ public int hashCode() {

   @Override
   public String toString() {
-    return new ToStringer(NodeTypeFlexibility.class).toString();
+    return new ToStringer(DeleteDatabaseCatalogResponse.class).toString();
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java
new file mode 100755
index 000000000..e043e1347
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java
@@ -0,0 +1,85 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete a Database Instance */
+@Generated
+public class DeleteDatabaseInstanceRequest {
+  /**
+   * By default, an instance cannot be deleted if it has descendant instances created via PITR. If
+   * this flag is specified as true, all descendant instances will be deleted as well.
+   */
+  @JsonIgnore
+  @QueryParam("force")
+  private Boolean force;
+
+  /** Name of the instance to delete. */
+  @JsonIgnore private String name;
+
+  /**
+   * If false, the database instance is soft deleted. Soft deleted instances behave as if they are
+   * deleted, and cannot be used for CRUD operations nor connected to. However, they can be
+   * undeleted by calling the undelete API for a limited time. If true, the database instance is
+   * hard deleted and cannot be undeleted.
+   */
+  @JsonIgnore
+  @QueryParam("purge")
+  private Boolean purge;
+
+  public DeleteDatabaseInstanceRequest setForce(Boolean force) {
+    this.force = force;
+    return this;
+  }
+
+  public Boolean getForce() {
+    return force;
+  }
+
+  public DeleteDatabaseInstanceRequest setName(String name) {
+    this.name = name;
+    return this;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public DeleteDatabaseInstanceRequest setPurge(Boolean purge) {
+    this.purge = purge;
+    return this;
+  }
+
+  public Boolean getPurge() {
+    return purge;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    DeleteDatabaseInstanceRequest that = (DeleteDatabaseInstanceRequest) o;
+    return Objects.equals(force, that.force)
+        && Objects.equals(name, that.name)
+        && Objects.equals(purge, that.purge);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(force, name, purge);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(DeleteDatabaseInstanceRequest.class)
+        .add("force", force)
+        .add("name", name)
+        .add("purge", purge)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java
new file mode 100755
index 000000000..4d96f2e05
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class DeleteDatabaseInstanceResponse {
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash();
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(DeleteDatabaseInstanceResponse.class).toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java
new file mode 100755
index 000000000..506ab393b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
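// ---------------------------------------------------------------------------
// [Editor's note] Illustrative only, not part of the generated patch: how the
// force/purge query parameters documented above combine on a delete call,
// reusing the `api` handle from the earlier sketch.
//
//   api.deleteDatabaseInstance(
//       new DeleteDatabaseInstanceRequest()
//           .setName("my-instance")
//           .setForce(true)    // also delete descendant instances created via PITR
//           .setPurge(true));  // hard delete; the instance cannot be undeleted
// ---------------------------------------------------------------------------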
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete a Synced Database Table */
+@Generated
+public class DeleteSyncedDatabaseTableRequest {
+  /** */
+  @JsonIgnore private String name;
+
+  public DeleteSyncedDatabaseTableRequest setName(String name) {
+    this.name = name;
+    return this;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    DeleteSyncedDatabaseTableRequest that = (DeleteSyncedDatabaseTableRequest) o;
+    return Objects.equals(name, that.name);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(DeleteSyncedDatabaseTableRequest.class).add("name", name).toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java
new file mode 100755
index 000000000..147f31d48
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class DeleteSyncedDatabaseTableResponse {
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash();
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(DeleteSyncedDatabaseTableResponse.class).toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java
index 2974dbfa7..7a0a9a477 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java
@@ -5,17 +5,30 @@
 import com.databricks.sdk.support.Generated;
 import com.databricks.sdk.support.ToStringer;
 import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
 import java.util.Objects;

-/** Enable a system schema */
 @Generated
 public class EnableRequest {
+  /** The catalog in which the system schema is to be enabled. */
+  @JsonProperty("catalog_name")
+  private String catalogName;
+
   /** The metastore ID under which the system schema lives. */
   @JsonIgnore private String metastoreId;

   /** Full name of the system schema. */
   @JsonIgnore private String schemaName;

+  public EnableRequest setCatalogName(String catalogName) {
+    this.catalogName = catalogName;
+    return this;
+  }
+
+  public String getCatalogName() {
+    return catalogName;
+  }
+
   public EnableRequest setMetastoreId(String metastoreId) {
     this.metastoreId = metastoreId;
     return this;
@@ -39,18 +52,20 @@ public boolean equals(Object o) {
     if (this == o) return true;
     if (o == null || getClass() != o.getClass()) return false;
     EnableRequest that = (EnableRequest) o;
-    return Objects.equals(metastoreId, that.metastoreId)
+    return Objects.equals(catalogName, that.catalogName)
+        && Objects.equals(metastoreId, that.metastoreId)
         && Objects.equals(schemaName, that.schemaName);
   }

   @Override
   public int hashCode() {
-    return Objects.hash(metastoreId, schemaName);
+    return Objects.hash(catalogName, metastoreId, schemaName);
   }

   @Override
   public String toString() {
     return new ToStringer(EnableRequest.class)
+        .add("catalogName", catalogName)
         .add("metastoreId", metastoreId)
         .add("schemaName", schemaName)
         .toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java
new file mode 100755
index 000000000..894cb8153
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Find a Database Instance by uid */
+@Generated
+public class FindDatabaseInstanceByUidRequest {
+  /** UID of the instance to get. */
+  @JsonIgnore
+  @QueryParam("uid")
+  private String uid;
+
+  public FindDatabaseInstanceByUidRequest setUid(String uid) {
+    this.uid = uid;
+    return this;
+  }
+
+  public String getUid() {
+    return uid;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    FindDatabaseInstanceByUidRequest that = (FindDatabaseInstanceByUidRequest) o;
+    return Objects.equals(uid, that.uid);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(uid);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(FindDatabaseInstanceByUidRequest.class).add("uid", uid).toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java
index 5bdae1c43..f0fad5bb6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java
@@ -30,8 +30,11 @@ public class GetBindingsRequest {
   /** The name of the securable. */
   @JsonIgnore private String securableName;

-  /** The type of the securable to bind to a workspace. */
-  @JsonIgnore private GetBindingsSecurableType securableType;
+  /**
+   * The type of the securable to bind to a workspace (catalog, storage_credential, credential, or
+   * external_location).
+ */ + @JsonIgnore private String securableType; public GetBindingsRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; @@ -60,12 +63,12 @@ public String getSecurableName() { return securableName; } - public GetBindingsRequest setSecurableType(GetBindingsSecurableType securableType) { + public GetBindingsRequest setSecurableType(String securableType) { this.securableType = securableType; return this; } - public GetBindingsSecurableType getSecurableType() { + public String getSecurableType() { return securableType; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java deleted file mode 100755 index d35c64de5..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.fasterxml.jackson.annotation.JsonProperty; - -@Generated -public enum GetBindingsSecurableType { - @JsonProperty("catalog") - CATALOG, - - @JsonProperty("credential") - CREDENTIAL, - - @JsonProperty("external_location") - EXTERNAL_LOCATION, - - @JsonProperty("storage_credential") - STORAGE_CREDENTIAL, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CurrentWorkspaceBindings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java similarity index 68% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CurrentWorkspaceBindings.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java index 09bc30eda..b2aedb9a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CurrentWorkspaceBindings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java @@ -8,14 +8,13 @@ import java.util.Collection; import java.util.Objects; -/** Currently assigned workspaces */ @Generated -public class CurrentWorkspaceBindings { - /** A list of workspace IDs. 
*/
+public class GetCatalogWorkspaceBindingsResponse {
+  /** A list of workspace IDs */
   @JsonProperty("workspaces")
   private Collection<Long> workspaces;

-  public CurrentWorkspaceBindings setWorkspaces(Collection<Long> workspaces) {
+  public GetCatalogWorkspaceBindingsResponse setWorkspaces(Collection<Long> workspaces) {
     this.workspaces = workspaces;
     return this;
   }
@@ -28,7 +27,7 @@ public Collection<Long> getWorkspaces() {
   public boolean equals(Object o) {
     if (this == o) return true;
     if (o == null || getClass() != o.getClass()) return false;
-    CurrentWorkspaceBindings that = (CurrentWorkspaceBindings) o;
+    GetCatalogWorkspaceBindingsResponse that = (GetCatalogWorkspaceBindingsResponse) o;
     return Objects.equals(workspaces, that.workspaces);
   }
@@ -39,6 +38,8 @@ public int hashCode() {

   @Override
   public String toString() {
-    return new ToStringer(CurrentWorkspaceBindings.class).add("workspaces", workspaces).toString();
+    return new ToStringer(GetCatalogWorkspaceBindingsResponse.class)
+        .add("workspaces", workspaces)
+        .toString();
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java
new file mode 100755
index 000000000..cd9b08f25
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get a Database Catalog */
+@Generated
+public class GetDatabaseCatalogRequest {
+  /** */
+  @JsonIgnore private String name;
+
+  public GetDatabaseCatalogRequest setName(String name) {
+    this.name = name;
+    return this;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    GetDatabaseCatalogRequest that = (GetDatabaseCatalogRequest) o;
+    return Objects.equals(name, that.name);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(GetDatabaseCatalogRequest.class).add("name", name).toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java
new file mode 100755
index 000000000..131d51e48
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get a Database Instance */
+@Generated
+public class GetDatabaseInstanceRequest {
+  /** Name of the instance to get.
*/
+  @JsonIgnore private String name;
+
+  public GetDatabaseInstanceRequest setName(String name) {
+    this.name = name;
+    return this;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    GetDatabaseInstanceRequest that = (GetDatabaseInstanceRequest) o;
+    return Objects.equals(name, that.name);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(GetDatabaseInstanceRequest.class).add("name", name).toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java
new file mode 100755
index 000000000..4c54608d7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get a Synced Database Table */
+@Generated
+public class GetSyncedDatabaseTableRequest {
+  /** */
+  @JsonIgnore private String name;
+
+  public GetSyncedDatabaseTableRequest setName(String name) {
+    this.name = name;
+    return this;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    GetSyncedDatabaseTableRequest that = (GetSyncedDatabaseTableRequest) o;
+    return Objects.equals(name, that.name);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(GetSyncedDatabaseTableRequest.class).add("name", name).toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java
similarity index 79%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java
index 7465fa5d7..7052a938b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java
@@ -8,9 +8,8 @@
 import java.util.Collection;
 import java.util.Objects;

-/** Currently assigned workspace bindings */
 @Generated
-public class WorkspaceBindingsResponse {
+public class GetWorkspaceBindingsResponse {
   /** List of workspace bindings */
   @JsonProperty("bindings")
   private Collection<WorkspaceBinding> bindings;
@@ -22,7 +21,7 @@ public class WorkspaceBindingsResponse {
   @JsonProperty("next_page_token")
   private String nextPageToken;

-  public WorkspaceBindingsResponse setBindings(Collection<WorkspaceBinding> bindings) {
+  public GetWorkspaceBindingsResponse setBindings(Collection<WorkspaceBinding> bindings) {
     this.bindings = bindings;
     return this;
   }
@@ -31,7 +30,7 @@ public Collection<WorkspaceBinding> getBindings() {
     return bindings;
   }

-  public
WorkspaceBindingsResponse setNextPageToken(String nextPageToken) { + public GetWorkspaceBindingsResponse setNextPageToken(String nextPageToken) { this.nextPageToken = nextPageToken; return this; } @@ -44,7 +43,7 @@ public String getNextPageToken() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - WorkspaceBindingsResponse that = (WorkspaceBindingsResponse) o; + GetWorkspaceBindingsResponse that = (GetWorkspaceBindingsResponse) o; return Objects.equals(bindings, that.bindings) && Objects.equals(nextPageToken, that.nextPageToken); } @@ -56,7 +55,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(WorkspaceBindingsResponse.class) + return new ToStringer(GetWorkspaceBindingsResponse.class) .add("bindings", bindings) .add("nextPageToken", nextPageToken) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java new file mode 100755 index 000000000..dbea4c7b3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List Database Instances */ +@Generated +public class ListDatabaseInstancesRequest { + /** Upper bound for items returned. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * Pagination token to go to the next page of Database Instances. Requests first page if absent. + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListDatabaseInstancesRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListDatabaseInstancesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListDatabaseInstancesRequest that = (ListDatabaseInstancesRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListDatabaseInstancesRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java new file mode 100755 index 000000000..e797c3e3a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
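// ---------------------------------------------------------------------------
// [Editor's note] Illustrative only, not part of the generated patch.
// DatabaseInstancesAPI.listDatabaseInstances wraps this request/response pair
// in a Paginator, so callers iterate without handling next_page_token
// themselves (again reusing the `api` handle from the earlier sketch):
//
//   for (DatabaseInstance instance :
//       api.listDatabaseInstances(new ListDatabaseInstancesRequest().setPageSize(50L))) {
//     System.out.println(instance.getName() + " -> " + instance.getState());
//   }
// ---------------------------------------------------------------------------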
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseInstancesResponse {
+  /** List of instances. */
+  @JsonProperty("database_instances")
+  private Collection<DatabaseInstance> databaseInstances;
+
+  /** Pagination token to request the next page of instances. */
+  @JsonProperty("next_page_token")
+  private String nextPageToken;
+
+  public ListDatabaseInstancesResponse setDatabaseInstances(
+      Collection<DatabaseInstance> databaseInstances) {
+    this.databaseInstances = databaseInstances;
+    return this;
+  }
+
+  public Collection<DatabaseInstance> getDatabaseInstances() {
+    return databaseInstances;
+  }
+
+  public ListDatabaseInstancesResponse setNextPageToken(String nextPageToken) {
+    this.nextPageToken = nextPageToken;
+    return this;
+  }
+
+  public String getNextPageToken() {
+    return nextPageToken;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    ListDatabaseInstancesResponse that = (ListDatabaseInstancesResponse) o;
+    return Objects.equals(databaseInstances, that.databaseInstances)
+        && Objects.equals(nextPageToken, that.nextPageToken);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(databaseInstances, nextPageToken);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(ListDatabaseInstancesResponse.class)
+        .add("databaseInstances", databaseInstances)
+        .add("nextPageToken", nextPageToken)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java
new file mode 100755
index 000000000..2cf1e9752
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java
@@ -0,0 +1,69 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other
+ * fields of pipeline are still inferred by table def internally
+ */
+@Generated
+public class NewPipelineSpec {
+  /**
+   * UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc). This
+   * needs to be a standard catalog where the user has permissions to create Delta tables.
+   */
+  @JsonProperty("storage_catalog")
+  private String storageCatalog;
+
+  /**
+   * UC schema for the pipeline to store intermediate files (checkpoints, event logs etc). This
+   * needs to be in the standard catalog where the user has permissions to create Delta tables.
+ */ + @JsonProperty("storage_schema") + private String storageSchema; + + public NewPipelineSpec setStorageCatalog(String storageCatalog) { + this.storageCatalog = storageCatalog; + return this; + } + + public String getStorageCatalog() { + return storageCatalog; + } + + public NewPipelineSpec setStorageSchema(String storageSchema) { + this.storageSchema = storageSchema; + return this; + } + + public String getStorageSchema() { + return storageSchema; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NewPipelineSpec that = (NewPipelineSpec) o; + return Objects.equals(storageCatalog, that.storageCatalog) + && Objects.equals(storageSchema, that.storageSchema); + } + + @Override + public int hashCode() { + return Objects.hash(storageCatalog, storageSchema); + } + + @Override + public String toString() { + return new ToStringer(NewPipelineSpec.class) + .add("storageCatalog", storageCatalog) + .add("storageSchema", storageSchema) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java new file mode 100755 index 000000000..dc513fb25 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java @@ -0,0 +1,166 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Next field marker: 10 */ +@Generated +public class SyncedDatabaseTable { + /** Synced Table data synchronization status */ + @JsonProperty("data_synchronization_status") + private OnlineTableStatus dataSynchronizationStatus; + + /** + * Name of the target database instance. This is required when creating synced database tables in + * standard catalogs. This is optional when creating synced database tables in registered + * catalogs. If this field is specified when creating synced database tables in registered + * catalogs, the database instance name MUST match that of the registered catalog (or the request + * will be rejected). + */ + @JsonProperty("database_instance_name") + private String databaseInstanceName; + + /** + * Target Postgres database object (logical database) name for this table. This field is optional + * in all scenarios. + * + *

<p>When creating a synced table in a registered Postgres catalog, the target Postgres database
+   * name is inferred to be that of the registered catalog. If this field is specified in this
+   * scenario, the Postgres database name MUST match that of the registered catalog (or the request
+   * will be rejected).
+   *
+   *

<p>When creating a synced table in a standard catalog, the target database name is inferred to
+   * be that of the standard catalog. In this scenario, specifying this field will allow targeting
+   * an arbitrary Postgres database.
+   */
+  @JsonProperty("logical_database_name")
+  private String logicalDatabaseName;
+
+  /** Full three-part (catalog, schema, table) name of the table. */
+  @JsonProperty("name")
+  private String name;
+
+  /** Specification of a synced database table. */
+  @JsonProperty("spec")
+  private SyncedTableSpec spec;
+
+  /** Data serving REST API URL for this table */
+  @JsonProperty("table_serving_url")
+  private String tableServingUrl;
+
+  /**
+   * The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
+   * state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
+   * may be in "PROVISIONING" as it runs asynchronously).
+   */
+  @JsonProperty("unity_catalog_provisioning_state")
+  private ProvisioningInfoState unityCatalogProvisioningState;
+
+  public SyncedDatabaseTable setDataSynchronizationStatus(
+      OnlineTableStatus dataSynchronizationStatus) {
+    this.dataSynchronizationStatus = dataSynchronizationStatus;
+    return this;
+  }
+
+  public OnlineTableStatus getDataSynchronizationStatus() {
+    return dataSynchronizationStatus;
+  }
+
+  public SyncedDatabaseTable setDatabaseInstanceName(String databaseInstanceName) {
+    this.databaseInstanceName = databaseInstanceName;
+    return this;
+  }
+
+  public String getDatabaseInstanceName() {
+    return databaseInstanceName;
+  }
+
+  public SyncedDatabaseTable setLogicalDatabaseName(String logicalDatabaseName) {
+    this.logicalDatabaseName = logicalDatabaseName;
+    return this;
+  }
+
+  public String getLogicalDatabaseName() {
+    return logicalDatabaseName;
+  }
+
+  public SyncedDatabaseTable setName(String name) {
+    this.name = name;
+    return this;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public SyncedDatabaseTable setSpec(SyncedTableSpec spec) {
+    this.spec = spec;
+    return this;
+  }
+
+  public SyncedTableSpec getSpec() {
+    return spec;
+  }
+
+  public SyncedDatabaseTable setTableServingUrl(String tableServingUrl) {
+    this.tableServingUrl = tableServingUrl;
+    return this;
+  }
+
+  public String getTableServingUrl() {
+    return tableServingUrl;
+  }
+
+  public SyncedDatabaseTable setUnityCatalogProvisioningState(
+      ProvisioningInfoState unityCatalogProvisioningState) {
+    this.unityCatalogProvisioningState = unityCatalogProvisioningState;
+    return this;
+  }
+
+  public ProvisioningInfoState getUnityCatalogProvisioningState() {
+    return unityCatalogProvisioningState;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    SyncedDatabaseTable that = (SyncedDatabaseTable) o;
+    return Objects.equals(dataSynchronizationStatus, that.dataSynchronizationStatus)
+        && Objects.equals(databaseInstanceName, that.databaseInstanceName)
+        && Objects.equals(logicalDatabaseName, that.logicalDatabaseName)
+        && Objects.equals(name, that.name)
+        && Objects.equals(spec, that.spec)
+        && Objects.equals(tableServingUrl, that.tableServingUrl)
+        && Objects.equals(unityCatalogProvisioningState, that.unityCatalogProvisioningState);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(
+        dataSynchronizationStatus,
+        databaseInstanceName,
+        logicalDatabaseName,
+        name,
+        spec,
+        tableServingUrl,
+        unityCatalogProvisioningState);
+  }
+
+  @Override
+  public String toString() {
+    return new
ToStringer(SyncedDatabaseTable.class)
+        .add("dataSynchronizationStatus", dataSynchronizationStatus)
+        .add("databaseInstanceName", databaseInstanceName)
+        .add("logicalDatabaseName", logicalDatabaseName)
+        .add("name", name)
+        .add("spec", spec)
+        .add("tableServingUrl", tableServingUrl)
+        .add("unityCatalogProvisioningState", unityCatalogProvisioningState)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java
new file mode 100755
index 000000000..0f0fd271d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum SyncedTableSchedulingPolicy {
+  CONTINUOUS,
+  SNAPSHOT,
+  TRIGGERED,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java
new file mode 100755
index 000000000..0f7ae97ef
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java
@@ -0,0 +1,146 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Specification of a synced database table. */
+@Generated
+public class SyncedTableSpec {
+  /**
+   * If true, the synced table's logical database and schema resources in PG will be created if they
+   * do not already exist.
+   */
+  @JsonProperty("create_database_objects_if_missing")
+  private Boolean createDatabaseObjectsIfMissing;
+
+  /** Spec of new pipeline. Should be empty if pipeline_id is set */
+  @JsonProperty("new_pipeline_spec")
+  private NewPipelineSpec newPipelineSpec;
+
+  /** ID of the associated pipeline. Should be empty if new_pipeline_spec is set */
+  @JsonProperty("pipeline_id")
+  private String pipelineId;
+
+  /** Primary Key columns to be used for data insert/update in the destination. */
+  @JsonProperty("primary_key_columns")
+  private Collection<String> primaryKeyColumns;
+
+  /** Scheduling policy of the underlying pipeline. */
+  @JsonProperty("scheduling_policy")
+  private SyncedTableSchedulingPolicy schedulingPolicy;
+
+  /** Three-part (catalog, schema, table) name of the source Delta table. */
+  @JsonProperty("source_table_full_name")
+  private String sourceTableFullName;
+
+  /** Time series key to deduplicate (tie-break) rows with the same primary key. */
+  @JsonProperty("timeseries_key")
+  private String timeseriesKey;
+
+  public SyncedTableSpec setCreateDatabaseObjectsIfMissing(Boolean createDatabaseObjectsIfMissing) {
+    this.createDatabaseObjectsIfMissing = createDatabaseObjectsIfMissing;
+    return this;
+  }
+
+  public Boolean getCreateDatabaseObjectsIfMissing() {
+    return createDatabaseObjectsIfMissing;
+  }
+
+  public SyncedTableSpec setNewPipelineSpec(NewPipelineSpec newPipelineSpec) {
+    this.newPipelineSpec = newPipelineSpec;
+    return this;
+  }
+
+  public NewPipelineSpec getNewPipelineSpec() {
+    return newPipelineSpec;
+  }
+
+  public SyncedTableSpec setPipelineId(String pipelineId) {
+    this.pipelineId = pipelineId;
+    return this;
+  }
+
+  public String getPipelineId() {
+    return pipelineId;
+  }
+
+  public SyncedTableSpec setPrimaryKeyColumns(Collection<String> primaryKeyColumns) {
+    this.primaryKeyColumns = primaryKeyColumns;
+    return this;
+  }
+
+  public Collection<String> getPrimaryKeyColumns() {
+    return primaryKeyColumns;
+  }
+
+  public SyncedTableSpec setSchedulingPolicy(SyncedTableSchedulingPolicy schedulingPolicy) {
+    this.schedulingPolicy = schedulingPolicy;
+    return this;
+  }
+
+  public SyncedTableSchedulingPolicy getSchedulingPolicy() {
+    return schedulingPolicy;
+  }
+
+  public SyncedTableSpec setSourceTableFullName(String sourceTableFullName) {
+    this.sourceTableFullName = sourceTableFullName;
+    return this;
+  }
+
+  public String getSourceTableFullName() {
+    return sourceTableFullName;
+  }
+
+  public SyncedTableSpec setTimeseriesKey(String timeseriesKey) {
+    this.timeseriesKey = timeseriesKey;
+    return this;
+  }
+
+  public String getTimeseriesKey() {
+    return timeseriesKey;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    SyncedTableSpec that = (SyncedTableSpec) o;
+    return Objects.equals(createDatabaseObjectsIfMissing, that.createDatabaseObjectsIfMissing)
+        && Objects.equals(newPipelineSpec, that.newPipelineSpec)
+        && Objects.equals(pipelineId, that.pipelineId)
+        && Objects.equals(primaryKeyColumns, that.primaryKeyColumns)
+        && Objects.equals(schedulingPolicy, that.schedulingPolicy)
+        && Objects.equals(sourceTableFullName, that.sourceTableFullName)
+        && Objects.equals(timeseriesKey, that.timeseriesKey);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(
+        createDatabaseObjectsIfMissing,
+        newPipelineSpec,
+        pipelineId,
+        primaryKeyColumns,
+        schedulingPolicy,
+        sourceTableFullName,
+        timeseriesKey);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(SyncedTableSpec.class)
+        .add("createDatabaseObjectsIfMissing", createDatabaseObjectsIfMissing)
+        .add("newPipelineSpec", newPipelineSpec)
+        .add("pipelineId", pipelineId)
+        .add("primaryKeyColumns", primaryKeyColumns)
+        .add("schedulingPolicy", schedulingPolicy)
+        .add("sourceTableFullName", sourceTableFullName)
+        .add("timeseriesKey", timeseriesKey)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java
index abbe73085..03d76e0f4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java
@@ -15,10 +15,11 @@ public class SystemSchemaInfo {
   /**
    * The current state of enablement for the system schema.
An empty string means the system schema - * is available and ready for opt-in. + * is available and ready for opt-in. Possible values: AVAILABLE | ENABLE_INITIALIZED | + * ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE */ @JsonProperty("state") - private SystemSchemaInfoState state; + private String state; public SystemSchemaInfo setSchema(String schema) { this.schema = schema; @@ -29,12 +30,12 @@ public String getSchema() { return schema; } - public SystemSchemaInfo setState(SystemSchemaInfoState state) { + public SystemSchemaInfo setState(String state) { this.state = state; return this; } - public SystemSchemaInfoState getState() { + public String getState() { return state; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfoState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfoState.java deleted file mode 100755 index 97aab838c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfoState.java +++ /dev/null @@ -1,18 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; - -/** - * The current state of enablement for the system schema. An empty string means the system schema is - * available and ready for opt-in. - */ -@Generated -public enum SystemSchemaInfoState { - AVAILABLE, - DISABLE_INITIALIZED, - ENABLE_COMPLETED, - ENABLE_INITIALIZED, - UNAVAILABLE, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java index b08818600..605036b9e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java @@ -67,10 +67,6 @@ public Iterable list(String metastoreId) { * metastore admin. 
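With state now a plain String (the SystemSchemaInfoState enum is deleted below), a hedged sketch of listing schemas and checking enablement; the WorkspaceClient accessor and the metastore ID placeholder are assumptions, not confirmed by this diff:

// Sketch, assuming a configured WorkspaceClient, inside a method body.
com.databricks.sdk.WorkspaceClient w = new com.databricks.sdk.WorkspaceClient();
for (SystemSchemaInfo info : w.systemSchemas().list("<metastore-id>")) {
  // Per the doc above, an empty string also means available and ready for opt-in.
  if ("AVAILABLE".equals(info.getState()) || "".equals(info.getState())) {
    System.out.println(info.getSchema() + " can be enabled");
  }
}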
*/ public Iterable list(ListSystemSchemasRequest request) { - - if (request.getMaxResults() == null) { - request.setMaxResults(0L); - } return new Paginator<>( request, impl::list, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java index d6ade950b..4adce737b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java @@ -39,9 +39,10 @@ public void enable(EnableRequest request) { "/api/2.1/unity-catalog/metastores/%s/systemschemas/%s", request.getMetastoreId(), request.getSchemaName()); try { - Request req = new Request("PUT", path); + Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); apiClient.execute(req, EnableResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java deleted file mode 100755 index 412d8d69b..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.fasterxml.jackson.annotation.JsonProperty; - -@Generated -public enum UpdateBindingsSecurableType { - @JsonProperty("catalog") - CATALOG, - - @JsonProperty("credential") - CREDENTIAL, - - @JsonProperty("external_location") - EXTERNAL_LOCATION, - - @JsonProperty("storage_credential") - STORAGE_CREDENTIAL, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java new file mode 100755 index 000000000..d3a70c499 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class UpdateCatalogWorkspaceBindingsResponse { + /** A list of workspace IDs */ + @JsonProperty("workspaces") + private Collection workspaces; + + public UpdateCatalogWorkspaceBindingsResponse setWorkspaces(Collection workspaces) { + this.workspaces = workspaces; + return this; + } + + public Collection getWorkspaces() { + return workspaces; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCatalogWorkspaceBindingsResponse that = (UpdateCatalogWorkspaceBindingsResponse) o; + return Objects.equals(workspaces, that.workspaces); + } + + @Override + public int hashCode() { + return Objects.hash(workspaces); + } + + @Override + public String toString() { + return new ToStringer(UpdateCatalogWorkspaceBindingsResponse.class) + .add("workspaces", workspaces) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java new file mode 100755 index 000000000..d40d63ba7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java @@ -0,0 +1,80 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update a Database Instance */ +@Generated +public class UpdateDatabaseInstanceRequest { + /** + * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and + * storage. + */ + @JsonProperty("database_instance") + private DatabaseInstance databaseInstance; + + /** The name of the instance. This is the unique identifier for the instance. */ + @JsonIgnore private String name; + + /** The list of fields to update. 
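A hedged sketch of a partial update using the field-mask convention; the instance name and the masked field path are illustrative assumptions, not values confirmed by this diff:

// Sketch only: "capacity" is a hypothetical field path; "*" would request a full replace.
UpdateDatabaseInstanceRequest request =
    new UpdateDatabaseInstanceRequest()
        .setName("my-instance")
        .setDatabaseInstance(new DatabaseInstance()) // populate only the masked fields
        .setUpdateMask("capacity");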
*/ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateDatabaseInstanceRequest setDatabaseInstance(DatabaseInstance databaseInstance) { + this.databaseInstance = databaseInstance; + return this; + } + + public DatabaseInstance getDatabaseInstance() { + return databaseInstance; + } + + public UpdateDatabaseInstanceRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateDatabaseInstanceRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDatabaseInstanceRequest that = (UpdateDatabaseInstanceRequest) o; + return Objects.equals(databaseInstance, that.databaseInstance) + && Objects.equals(name, that.name) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(databaseInstance, name, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateDatabaseInstanceRequest.class) + .add("databaseInstance", databaseInstance) + .add("name", name) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java index 07853f7ff..3af63d755 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java @@ -11,19 +11,22 @@ @Generated public class UpdateWorkspaceBindingsParameters { - /** List of workspace bindings */ + /** List of workspace bindings. */ @JsonProperty("add") private Collection add; - /** List of workspace bindings */ + /** List of workspace bindings. */ @JsonProperty("remove") private Collection remove; /** The name of the securable. */ @JsonIgnore private String securableName; - /** The type of the securable to bind to a workspace. */ - @JsonIgnore private UpdateBindingsSecurableType securableType; + /** + * The type of the securable to bind to a workspace (catalog, storage_credential, credential, or + * external_location). 
+ */ + @JsonIgnore private String securableType; public UpdateWorkspaceBindingsParameters setAdd(Collection add) { this.add = add; @@ -52,13 +55,12 @@ public String getSecurableName() { return securableName; } - public UpdateWorkspaceBindingsParameters setSecurableType( - UpdateBindingsSecurableType securableType) { + public UpdateWorkspaceBindingsParameters setSecurableType(String securableType) { this.securableType = securableType; return this; } - public UpdateBindingsSecurableType getSecurableType() { + public String getSecurableType() { return securableType; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponse.java new file mode 100755 index 000000000..c87e345fd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsResponse.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** A list of workspace IDs that are bound to the securable */ +@Generated +public class UpdateWorkspaceBindingsResponse { + /** List of workspace bindings. */ + @JsonProperty("bindings") + private Collection bindings; + + public UpdateWorkspaceBindingsResponse setBindings(Collection bindings) { + this.bindings = bindings; + return this; + } + + public Collection getBindings() { + return bindings; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateWorkspaceBindingsResponse that = (UpdateWorkspaceBindingsResponse) o; + return Objects.equals(bindings, that.bindings); + } + + @Override + public int hashCode() { + return Objects.hash(bindings); + } + + @Override + public String toString() { + return new ToStringer(UpdateWorkspaceBindingsResponse.class) + .add("bindings", bindings) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java index f7c28ff2a..0d0c8653a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java @@ -9,11 +9,11 @@ @Generated public class WorkspaceBinding { - /** */ + /** One of READ_WRITE/READ_ONLY. Default is READ_WRITE. 
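Tying these together, a hedged sketch of binding a catalog to a workspace in read-only mode, using the string securable type that replaces the deleted UpdateBindingsSecurableType enum; the catalog name and workspace ID are placeholders:

// Sketch only: values are illustrative.
UpdateWorkspaceBindingsParameters params =
    new UpdateWorkspaceBindingsParameters()
        .setSecurableType("catalog") // was UpdateBindingsSecurableType.CATALOG
        .setSecurableName("main")
        .setAdd(
            java.util.Arrays.asList(
                new WorkspaceBinding()
                    .setWorkspaceId(1234567890L)
                    .setBindingType(WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY)));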
*/ @JsonProperty("binding_type") private WorkspaceBindingBindingType bindingType; - /** */ + /** Required */ @JsonProperty("workspace_id") private Long workspaceId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java index 23ffd7f73..0aeb76cc9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java @@ -4,6 +4,10 @@ import com.databricks.sdk.support.Generated; +/** + * Using `BINDING_TYPE_` prefix here to avoid conflict with `TableOperation` enum in + * `credentials_common.proto`. + */ @Generated public enum WorkspaceBindingBindingType { BINDING_TYPE_READ_ONLY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java index 8bdddadec..e82e88906 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java @@ -23,7 +23,8 @@ * Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which * introduces the ability to bind a securable in READ_ONLY mode (catalogs only). * - *
<p>Securable types that support binding: - catalog - storage_credential - external_location + *
<p>
Securable types that support binding: - catalog - storage_credential - credential - + * external_location */ @Generated public class WorkspaceBindingsAPI { @@ -41,7 +42,7 @@ public WorkspaceBindingsAPI(WorkspaceBindingsService mock) { impl = mock; } - public CurrentWorkspaceBindings get(String name) { + public GetCatalogWorkspaceBindingsResponse get(String name) { return get(new GetWorkspaceBindingRequest().setName(name)); } @@ -51,12 +52,11 @@ public CurrentWorkspaceBindings get(String name) { *
<p>
Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of * the catalog. */ - public CurrentWorkspaceBindings get(GetWorkspaceBindingRequest request) { + public GetCatalogWorkspaceBindingsResponse get(GetWorkspaceBindingRequest request) { return impl.get(request); } - public Iterable getBindings( - GetBindingsSecurableType securableType, String securableName) { + public Iterable getBindings(String securableType, String securableName) { return getBindings( new GetBindingsRequest().setSecurableType(securableType).setSecurableName(securableName)); } @@ -71,7 +71,7 @@ public Iterable getBindings(GetBindingsRequest request) { return new Paginator<>( request, impl::getBindings, - WorkspaceBindingsResponse::getBindings, + GetWorkspaceBindingsResponse::getBindings, response -> { String token = response.getNextPageToken(); if (token == null || token.isEmpty()) { @@ -81,7 +81,7 @@ public Iterable getBindings(GetBindingsRequest request) { }); } - public CurrentWorkspaceBindings update(String name) { + public UpdateCatalogWorkspaceBindingsResponse update(String name) { return update(new UpdateWorkspaceBindings().setName(name)); } @@ -91,12 +91,12 @@ public CurrentWorkspaceBindings update(String name) { *
<p>
Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner * of the catalog. */ - public CurrentWorkspaceBindings update(UpdateWorkspaceBindings request) { + public UpdateCatalogWorkspaceBindingsResponse update(UpdateWorkspaceBindings request) { return impl.update(request); } - public WorkspaceBindingsResponse updateBindings( - UpdateBindingsSecurableType securableType, String securableName) { + public UpdateWorkspaceBindingsResponse updateBindings( + String securableType, String securableName) { return updateBindings( new UpdateWorkspaceBindingsParameters() .setSecurableType(securableType) @@ -109,7 +109,7 @@ public WorkspaceBindingsResponse updateBindings( *
<p>
Updates workspace bindings of the securable. The caller must be a metastore admin or an * owner of the securable. */ - public WorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) { + public UpdateWorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) { return impl.updateBindings(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java index 7516c5686..ab6b07cbd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java @@ -17,21 +17,21 @@ public WorkspaceBindingsImpl(ApiClient apiClient) { } @Override - public CurrentWorkspaceBindings get(GetWorkspaceBindingRequest request) { + public GetCatalogWorkspaceBindingsResponse get(GetWorkspaceBindingRequest request) { String path = String.format("/api/2.1/unity-catalog/workspace-bindings/catalogs/%s", request.getName()); try { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, CurrentWorkspaceBindings.class); + return apiClient.execute(req, GetCatalogWorkspaceBindingsResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public WorkspaceBindingsResponse getBindings(GetBindingsRequest request) { + public GetWorkspaceBindingsResponse getBindings(GetBindingsRequest request) { String path = String.format( "/api/2.1/unity-catalog/bindings/%s/%s", @@ -40,14 +40,14 @@ public WorkspaceBindingsResponse getBindings(GetBindingsRequest request) { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, WorkspaceBindingsResponse.class); + return apiClient.execute(req, GetWorkspaceBindingsResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public CurrentWorkspaceBindings update(UpdateWorkspaceBindings request) { + public UpdateCatalogWorkspaceBindingsResponse update(UpdateWorkspaceBindings request) { String path = String.format("/api/2.1/unity-catalog/workspace-bindings/catalogs/%s", request.getName()); try { @@ -55,14 +55,14 @@ public CurrentWorkspaceBindings update(UpdateWorkspaceBindings request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, CurrentWorkspaceBindings.class); + return apiClient.execute(req, UpdateCatalogWorkspaceBindingsResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public WorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) { + public UpdateWorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) { String path = String.format( "/api/2.1/unity-catalog/bindings/%s/%s", @@ -72,7 +72,7 @@ public WorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameter ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, WorkspaceBindingsResponse.class); + return apiClient.execute(req, 
UpdateWorkspaceBindingsResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java index 9ab53e070..962b4ea69 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java @@ -19,7 +19,8 @@ * Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which * introduces the ability to bind a securable in READ_ONLY mode (catalogs only). * - *
<p>Securable types that support binding: - catalog - storage_credential - external_location + *
<p>Securable types that support binding: - catalog - storage_credential - credential - + * external_location * *
<p>This is the high-level interface, that contains generated methods. * @@ -33,7 +34,7 @@ public interface WorkspaceBindingsService { /** * Get catalog workspace bindings. *
<p>Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of * the catalog. */ - CurrentWorkspaceBindings get(GetWorkspaceBindingRequest getWorkspaceBindingRequest); + GetCatalogWorkspaceBindingsResponse get(GetWorkspaceBindingRequest getWorkspaceBindingRequest); /** * Get securable workspace bindings. @@ -41,7 +42,7 @@ public interface WorkspaceBindingsService { *
<p>Gets workspace bindings of the securable. The caller must be a metastore admin or an owner * of the securable. */ - WorkspaceBindingsResponse getBindings(GetBindingsRequest getBindingsRequest); + GetWorkspaceBindingsResponse getBindings(GetBindingsRequest getBindingsRequest); /** * Update catalog workspace bindings. @@ -49,7 +50,7 @@ public interface WorkspaceBindingsService { *
<p>Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner * of the catalog. */ - CurrentWorkspaceBindings update(UpdateWorkspaceBindings updateWorkspaceBindings); + UpdateCatalogWorkspaceBindingsResponse update(UpdateWorkspaceBindings updateWorkspaceBindings); /** * Update securable workspace bindings. @@ -57,6 +58,6 @@ public interface WorkspaceBindingsService { *
<p>
Updates workspace bindings of the securable. The caller must be a metastore admin or an * owner of the securable. */ - WorkspaceBindingsResponse updateBindings( + UpdateWorkspaceBindingsResponse updateBindings( UpdateWorkspaceBindingsParameters updateWorkspaceBindingsParameters); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java index d6daf72a8..5f1c2d218 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java @@ -52,13 +52,6 @@ public class EditInstancePool { @JsonProperty("min_idle_instances") private Long minIdleInstances; - /** - * For Fleet-pool V2, this object contains the information about the alternate node type ids to - * use when attempting to launch a cluster if the node type id is not available. - */ - @JsonProperty("node_type_flexibility") - private NodeTypeFlexibility nodeTypeFlexibility; - /** * This field encodes, through a single value, the resources available to each of the Spark nodes * in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or @@ -123,15 +116,6 @@ public Long getMinIdleInstances() { return minIdleInstances; } - public EditInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) { - this.nodeTypeFlexibility = nodeTypeFlexibility; - return this; - } - - public NodeTypeFlexibility getNodeTypeFlexibility() { - return nodeTypeFlexibility; - } - public EditInstancePool setNodeTypeId(String nodeTypeId) { this.nodeTypeId = nodeTypeId; return this; @@ -153,7 +137,6 @@ public boolean equals(Object o) { && Objects.equals(instancePoolName, that.instancePoolName) && Objects.equals(maxCapacity, that.maxCapacity) && Objects.equals(minIdleInstances, that.minIdleInstances) - && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility) && Objects.equals(nodeTypeId, that.nodeTypeId); } @@ -166,7 +149,6 @@ public int hashCode() { instancePoolName, maxCapacity, minIdleInstances, - nodeTypeFlexibility, nodeTypeId); } @@ -179,7 +161,6 @@ public String toString() { .add("instancePoolName", instancePoolName) .add("maxCapacity", maxCapacity) .add("minIdleInstances", minIdleInstances) - .add("nodeTypeFlexibility", nodeTypeFlexibility) .add("nodeTypeId", nodeTypeId) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java index 25779f5c7..fb3c4aa48 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java @@ -36,6 +36,16 @@ public class Environment { @JsonProperty("dependencies") private Collection dependencies; + /** + * We renamed `client` to `environment_version` in notebook exports. This field is meant solely so + * that imported notebooks with `environment_version` can be deserialized correctly, in a + * backwards-compatible way (i.e. if `client` is specified instead of `environment_version`, it + * will be deserialized correctly). Do NOT use this field for any other purpose, e.g. notebook + * storage. This field is not yet exposed to customers (e.g. in the jobs API). 
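Given that note, a hedged sketch showing that an export using the renamed key still deserializes into the generated model; the JSON literal is illustrative and a plain Jackson ObjectMapper is assumed to suffice for this POJO:

// Sketch, inside a method that declares `throws Exception`.
com.fasterxml.jackson.databind.ObjectMapper mapper =
    new com.fasterxml.jackson.databind.ObjectMapper();
Environment env =
    mapper.readValue(
        "{\"environment_version\":\"2\",\"dependencies\":[\"pandas\"]}", Environment.class);
System.out.println(env.getEnvironmentVersion()); // prints 2; per the doc, prefer `client` elsewhere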
+ */ + @JsonProperty("environment_version") + private String environmentVersion; + /** * List of jar dependencies, should be string representing volume paths. For example: * `/Volumes/path/to/test.jar`. @@ -61,6 +71,15 @@ public Collection getDependencies() { return dependencies; } + public Environment setEnvironmentVersion(String environmentVersion) { + this.environmentVersion = environmentVersion; + return this; + } + + public String getEnvironmentVersion() { + return environmentVersion; + } + public Environment setJarDependencies(Collection jarDependencies) { this.jarDependencies = jarDependencies; return this; @@ -77,12 +96,13 @@ public boolean equals(Object o) { Environment that = (Environment) o; return Objects.equals(client, that.client) && Objects.equals(dependencies, that.dependencies) + && Objects.equals(environmentVersion, that.environmentVersion) && Objects.equals(jarDependencies, that.jarDependencies); } @Override public int hashCode() { - return Objects.hash(client, dependencies, jarDependencies); + return Objects.hash(client, dependencies, environmentVersion, jarDependencies); } @Override @@ -90,6 +110,7 @@ public String toString() { return new ToStringer(Environment.class) .add("client", client) .add("dependencies", dependencies) + .add("environmentVersion", environmentVersion) .add("jarDependencies", jarDependencies) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java index 13f105576..97feb90b6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java @@ -101,13 +101,6 @@ public class GetInstancePool { @JsonProperty("min_idle_instances") private Long minIdleInstances; - /** - * For Fleet-pool V2, this object contains the information about the alternate node type ids to - * use when attempting to launch a cluster if the node type id is not available. - */ - @JsonProperty("node_type_flexibility") - private NodeTypeFlexibility nodeTypeFlexibility; - /** * This field encodes, through a single value, the resources available to each of the Spark nodes * in this cluster. 
For example, the Spark nodes can be provisioned and optimized for memory or @@ -250,15 +243,6 @@ public Long getMinIdleInstances() { return minIdleInstances; } - public GetInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) { - this.nodeTypeFlexibility = nodeTypeFlexibility; - return this; - } - - public NodeTypeFlexibility getNodeTypeFlexibility() { - return nodeTypeFlexibility; - } - public GetInstancePool setNodeTypeId(String nodeTypeId) { this.nodeTypeId = nodeTypeId; return this; @@ -331,7 +315,6 @@ public boolean equals(Object o) { && Objects.equals(instancePoolName, that.instancePoolName) && Objects.equals(maxCapacity, that.maxCapacity) && Objects.equals(minIdleInstances, that.minIdleInstances) - && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility) && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(preloadedDockerImages, that.preloadedDockerImages) && Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions) @@ -355,7 +338,6 @@ public int hashCode() { instancePoolName, maxCapacity, minIdleInstances, - nodeTypeFlexibility, nodeTypeId, preloadedDockerImages, preloadedSparkVersions, @@ -379,7 +361,6 @@ public String toString() { .add("instancePoolName", instancePoolName) .add("maxCapacity", maxCapacity) .add("minIdleInstances", minIdleInstances) - .add("nodeTypeFlexibility", nodeTypeFlexibility) .add("nodeTypeId", nodeTypeId) .add("preloadedDockerImages", preloadedDockerImages) .add("preloadedSparkVersions", preloadedSparkVersions) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java index 849c15c52..f2fd58676 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java @@ -101,13 +101,6 @@ public class InstancePoolAndStats { @JsonProperty("min_idle_instances") private Long minIdleInstances; - /** - * For Fleet-pool V2, this object contains the information about the alternate node type ids to - * use when attempting to launch a cluster if the node type id is not available. - */ - @JsonProperty("node_type_flexibility") - private NodeTypeFlexibility nodeTypeFlexibility; - /** * This field encodes, through a single value, the resources available to each of the Spark nodes * in this cluster. 
For example, the Spark nodes can be provisioned and optimized for memory or @@ -250,15 +243,6 @@ public Long getMinIdleInstances() { return minIdleInstances; } - public InstancePoolAndStats setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) { - this.nodeTypeFlexibility = nodeTypeFlexibility; - return this; - } - - public NodeTypeFlexibility getNodeTypeFlexibility() { - return nodeTypeFlexibility; - } - public InstancePoolAndStats setNodeTypeId(String nodeTypeId) { this.nodeTypeId = nodeTypeId; return this; @@ -332,7 +316,6 @@ public boolean equals(Object o) { && Objects.equals(instancePoolName, that.instancePoolName) && Objects.equals(maxCapacity, that.maxCapacity) && Objects.equals(minIdleInstances, that.minIdleInstances) - && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility) && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(preloadedDockerImages, that.preloadedDockerImages) && Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions) @@ -356,7 +339,6 @@ public int hashCode() { instancePoolName, maxCapacity, minIdleInstances, - nodeTypeFlexibility, nodeTypeId, preloadedDockerImages, preloadedSparkVersions, @@ -380,7 +362,6 @@ public String toString() { .add("instancePoolName", instancePoolName) .add("maxCapacity", maxCapacity) .add("minIdleInstances", minIdleInstances) - .add("nodeTypeFlexibility", nodeTypeFlexibility) .add("nodeTypeId", nodeTypeId) .add("preloadedDockerImages", preloadedDockerImages) .add("preloadedSparkVersions", preloadedSparkVersions) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java index 35cd4d6b1..5d8ea0584 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java @@ -15,6 +15,9 @@ public enum MessageErrorType { COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION, COULD_NOT_GET_UC_SCHEMA_EXCEPTION, DEPLOYMENT_NOT_FOUND_EXCEPTION, + DESCRIBE_QUERY_INVALID_SQL_ERROR, + DESCRIBE_QUERY_TIMEOUT, + DESCRIBE_QUERY_UNEXPECTED_FAILURE, FUNCTIONS_NOT_AVAILABLE_EXCEPTION, FUNCTION_ARGUMENTS_INVALID_EXCEPTION, FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION, @@ -27,9 +30,13 @@ public enum MessageErrorType { ILLEGAL_PARAMETER_DEFINITION_EXCEPTION, INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION, INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION, + INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION, INVALID_CHAT_COMPLETION_JSON_EXCEPTION, INVALID_COMPLETION_REQUEST_EXCEPTION, INVALID_FUNCTION_CALL_EXCEPTION, + INVALID_SQL_MULTIPLE_DATASET_REFERENCES_EXCEPTION, + INVALID_SQL_MULTIPLE_STATEMENTS_EXCEPTION, + INVALID_SQL_UNKNOWN_TABLE_EXCEPTION, INVALID_TABLE_IDENTIFIER_EXCEPTION, LOCAL_CONTEXT_EXCEEDED_EXCEPTION, MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java index fb054306c..d48921d05 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java @@ -10,10 +10,15 @@ /** Get object permission levels */ @Generated public class GetPermissionLevelsRequest { - /** */ + /** */ @JsonIgnore private String 
requestObjectId; - /** */ + /** + * The type of the request object. Can be one of the following: alerts, authorization, clusters, + * cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, + * serving-endpoints, or warehouses. + */ @JsonIgnore private String requestObjectType; public GetPermissionLevelsRequest setRequestObjectId(String requestObjectId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java index 0d1779ef2..f70dc9e56 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java @@ -9,6 +9,7 @@ public enum PermissionLevel { CAN_ATTACH_TO, CAN_BIND, + CAN_CREATE, CAN_EDIT, CAN_EDIT_METADATA, CAN_MANAGE, @@ -16,6 +17,7 @@ public enum PermissionLevel { CAN_MANAGE_RUN, CAN_MANAGE_STAGING_VERSIONS, CAN_MONITOR, + CAN_MONITOR_ONLY, CAN_QUERY, CAN_READ, CAN_RESTART, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java index 521824587..4e4ef27aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java @@ -8,53 +8,27 @@ /** * Permissions API are used to create read, write, edit, update and manage access for various users - * on different objects and endpoints. - * - *
<p>* **[Apps permissions](:service:apps)** — Manage which users can manage or use apps. - * - *
<p>* **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or - * attach to clusters. - * - *
<p>* **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users can use - * cluster policies. - * - *
<p>* **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can - * view, manage, run, cancel, or own a Delta Live Tables pipeline. - * - *
<p>* **[Job permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, - * or own a job. - * - *
<p>* **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, - * edit, or manage MLflow experiments. - * - *
<p>* **[MLflow registered model permissions](:service:modelregistry)** — Manage which users can - * read, edit, or manage MLflow registered models. - * - *
<p>* **[Password permissions](:service:users)** — Manage which users can use password login when - * SSO is enabled. - * - *
<p>* **[Instance Pool permissions](:service:instancepools)** — Manage which users can manage or - * attach to pools. - * - *
<p>* **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo. - * - *
<p>* **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view, - * query, or manage a serving endpoint. - * - *
<p>* **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage - * SQL warehouses. - * - *
<p>* **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use - * tokens. - * - *
<p>* **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, - * edit, or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. - * - *
<p>For the mapping of the required permissions for specific actions or abilities and other - * important information, see [Access Control]. - * - *
<p>
Note that to manage access control on service principals, use **[Account Access Control - * Proxy](:service:accountaccesscontrolproxy)**. + * on different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage which users + * can manage or use apps. * **[Cluster permissions](:service:clusters)** — Manage which users can + * manage, restart, or attach to clusters. * **[Cluster policy + * permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. * + * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view, + * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job permissions](:service:jobs)** + * — Manage which users can view, manage, trigger, cancel, or own a job. * **[MLflow experiment + * permissions](:service:experiments)** — Manage which users can read, edit, or manage MLflow + * experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which + * users can read, edit, or manage MLflow registered models. * **[Instance Pool + * permissions](:service:instancepools)** — Manage which users can manage or attach to pools. * + * **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo. * + * **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view, + * query, or manage a serving endpoint. * **[SQL warehouse permissions](:service:warehouses)** — + * Manage which users can use or manage SQL warehouses. * **[Token + * permissions](:service:tokenmanagement)** — Manage which users can create or use tokens. * + * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, + * or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. For the mapping of + * the required permissions for specific actions or abilities and other important information, see + * [Access Control]. Note that to manage access control on service principals, use **[Account Access + * Control Proxy](:service:accountaccesscontrolproxy)**. * *
<p>
[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html */ @@ -110,7 +84,7 @@ public GetPermissionLevelsResponse getPermissionLevels(GetPermissionLevelsReques public ObjectPermissions set(String requestObjectType, String requestObjectId) { return set( - new PermissionsRequest() + new SetObjectPermissions() .setRequestObjectType(requestObjectType) .setRequestObjectId(requestObjectId)); } @@ -122,13 +96,13 @@ public ObjectPermissions set(String requestObjectType, String requestObjectId) { * direct permissions if none are specified. Objects can inherit permissions from their parent * objects or root object. */ - public ObjectPermissions set(PermissionsRequest request) { + public ObjectPermissions set(SetObjectPermissions request) { return impl.set(request); } public ObjectPermissions update(String requestObjectType, String requestObjectId) { return update( - new PermissionsRequest() + new UpdateObjectPermissions() .setRequestObjectType(requestObjectType) .setRequestObjectId(requestObjectId)); } @@ -139,7 +113,7 @@ public ObjectPermissions update(String requestObjectType, String requestObjectId *
<p>
Updates the permissions on an object. Objects can inherit permissions from their parent * objects or root object. */ - public ObjectPermissions update(PermissionsRequest request) { + public ObjectPermissions update(UpdateObjectPermissions request) { return impl.update(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java index af9d0c100..379ed77ad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java @@ -49,7 +49,7 @@ public GetPermissionLevelsResponse getPermissionLevels(GetPermissionLevelsReques } @Override - public ObjectPermissions set(PermissionsRequest request) { + public ObjectPermissions set(SetObjectPermissions request) { String path = String.format( "/api/2.0/permissions/%s/%s", @@ -66,7 +66,7 @@ public ObjectPermissions set(PermissionsRequest request) { } @Override - public ObjectPermissions update(PermissionsRequest request) { + public ObjectPermissions update(UpdateObjectPermissions request) { String path = String.format( "/api/2.0/permissions/%s/%s", diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java index 03ccea94e..592dcea96 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java @@ -5,53 +5,27 @@ /** * Permissions API are used to create read, write, edit, update and manage access for various users - * on different objects and endpoints. - * - *
<p>* **[Apps permissions](:service:apps)** — Manage which users can manage or use apps. - * - *
<p>* **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or - * attach to clusters. - * - *
<p>* **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users can use - * cluster policies. - * - *
<p>* **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can - * view, manage, run, cancel, or own a Delta Live Tables pipeline. - * - *
<p>* **[Job permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, - * or own a job. - * - *
<p>* **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, - * edit, or manage MLflow experiments. - * - *
<p>* **[MLflow registered model permissions](:service:modelregistry)** — Manage which users can - * read, edit, or manage MLflow registered models. - * - *
<p>* **[Password permissions](:service:users)** — Manage which users can use password login when - * SSO is enabled. - * - *
<p>* **[Instance Pool permissions](:service:instancepools)** — Manage which users can manage or - * attach to pools. - * - *
<p>* **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo. - * - *
<p>* **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view, - * query, or manage a serving endpoint. - * - *
<p>* **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage - * SQL warehouses. - * - *
<p>* **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use - * tokens. - * - *
<p>* **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, - * edit, or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. - * - *
<p>For the mapping of the required permissions for specific actions or abilities and other - * important information, see [Access Control]. - * - *
<p>
Note that to manage access control on service principals, use **[Account Access Control - * Proxy](:service:accountaccesscontrolproxy)**. + * on different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage which users + * can manage or use apps. * **[Cluster permissions](:service:clusters)** — Manage which users can + * manage, restart, or attach to clusters. * **[Cluster policy + * permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. * + * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view, + * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job permissions](:service:jobs)** + * — Manage which users can view, manage, trigger, cancel, or own a job. * **[MLflow experiment + * permissions](:service:experiments)** — Manage which users can read, edit, or manage MLflow + * experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which + * users can read, edit, or manage MLflow registered models. * **[Instance Pool + * permissions](:service:instancepools)** — Manage which users can manage or attach to pools. * + * **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo. * + * **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view, + * query, or manage a serving endpoint. * **[SQL warehouse permissions](:service:warehouses)** — + * Manage which users can use or manage SQL warehouses. * **[Token + * permissions](:service:tokenmanagement)** — Manage which users can create or use tokens. * + * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, + * or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. For the mapping of + * the required permissions for specific actions or abilities and other important information, see + * [Access Control]. Note that to manage access control on service principals, use **[Account Access + * Control Proxy](:service:accountaccesscontrolproxy)**. * *
<p>
[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html * @@ -84,7 +58,7 @@ GetPermissionLevelsResponse getPermissionLevels( * direct permissions if none are specified. Objects can inherit permissions from their parent * objects or root object. */ - ObjectPermissions set(PermissionsRequest permissionsRequest); + ObjectPermissions set(SetObjectPermissions setObjectPermissions); /** * Update object permissions. @@ -92,5 +66,5 @@ GetPermissionLevelsResponse getPermissionLevels( *
<p>
Updates the permissions on an object. Objects can inherit permissions from their parent * objects or root object. */ - ObjectPermissions update(PermissionsRequest permissionsRequest); + ObjectPermissions update(UpdateObjectPermissions updateObjectPermissions); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java similarity index 85% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java index 1faafbab4..0f3b404da 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java @@ -10,7 +10,7 @@ import java.util.Objects; @Generated -public class PermissionsRequest { +public class SetObjectPermissions { /** */ @JsonProperty("access_control_list") private Collection accessControlList; @@ -26,7 +26,7 @@ public class PermissionsRequest { */ @JsonIgnore private String requestObjectType; - public PermissionsRequest setAccessControlList( + public SetObjectPermissions setAccessControlList( Collection accessControlList) { this.accessControlList = accessControlList; return this; @@ -36,7 +36,7 @@ public Collection getAccessControlList() { return accessControlList; } - public PermissionsRequest setRequestObjectId(String requestObjectId) { + public SetObjectPermissions setRequestObjectId(String requestObjectId) { this.requestObjectId = requestObjectId; return this; } @@ -45,7 +45,7 @@ public String getRequestObjectId() { return requestObjectId; } - public PermissionsRequest setRequestObjectType(String requestObjectType) { + public SetObjectPermissions setRequestObjectType(String requestObjectType) { this.requestObjectType = requestObjectType; return this; } @@ -58,7 +58,7 @@ public String getRequestObjectType() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - PermissionsRequest that = (PermissionsRequest) o; + SetObjectPermissions that = (SetObjectPermissions) o; return Objects.equals(accessControlList, that.accessControlList) && Objects.equals(requestObjectId, that.requestObjectId) && Objects.equals(requestObjectType, that.requestObjectType); @@ -71,7 +71,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(PermissionsRequest.class) + return new ToStringer(SetObjectPermissions.class) .add("accessControlList", accessControlList) .add("requestObjectId", requestObjectId) .add("requestObjectType", requestObjectType) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java new file mode 100755 index 000000000..21ce2f907 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java @@ -0,0 +1,80 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
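For callers migrating off the renamed PermissionsRequest, a hedged sketch of the split set/update request types; the object type, ID, and principal are placeholders, and AccessControlRequest is assumed from this same iam package:

// Sketch, assuming a configured WorkspaceClient `w`.
ObjectPermissions perms =
    w.permissions()
        .update(
            new UpdateObjectPermissions()
                .setRequestObjectType("jobs")
                .setRequestObjectId("123")
                .setAccessControlList(
                    java.util.Arrays.asList(
                        new AccessControlRequest()
                            .setUserName("user@example.com")
                            .setPermissionLevel(PermissionLevel.CAN_MANAGE))));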
+ +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class UpdateObjectPermissions { + /** */ + @JsonProperty("access_control_list") + private Collection accessControlList; + + /** The id of the request object. */ + @JsonIgnore private String requestObjectId; + + /** + * The type of the request object. Can be one of the following: alerts, authorization, clusters, + * cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + * instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, + * serving-endpoints, or warehouses. + */ + @JsonIgnore private String requestObjectType; + + public UpdateObjectPermissions setAccessControlList( + Collection accessControlList) { + this.accessControlList = accessControlList; + return this; + } + + public Collection getAccessControlList() { + return accessControlList; + } + + public UpdateObjectPermissions setRequestObjectId(String requestObjectId) { + this.requestObjectId = requestObjectId; + return this; + } + + public String getRequestObjectId() { + return requestObjectId; + } + + public UpdateObjectPermissions setRequestObjectType(String requestObjectType) { + this.requestObjectType = requestObjectType; + return this; + } + + public String getRequestObjectType() { + return requestObjectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateObjectPermissions that = (UpdateObjectPermissions) o; + return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(requestObjectId, that.requestObjectId) + && Objects.equals(requestObjectType, that.requestObjectType); + } + + @Override + public int hashCode() { + return Objects.hash(accessControlList, requestObjectId, requestObjectType); + } + + @Override + public String toString() { + return new ToStringer(UpdateObjectPermissions.class) + .add("accessControlList", accessControlList) + .add("requestObjectId", requestObjectId) + .add("requestObjectType", requestObjectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java index b2c9cf16a..7d719bb94 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java @@ -6,26 +6,27 @@ /** * The code indicates why the run was terminated. Additional codes might be introduced in future - * releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was - * successfully canceled during execution by a user. * `CANCELED`: The run was canceled during - * execution by the Databricks platform; for example, if the maximum run duration was exceeded. * - * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency - * type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The - * run encountered an unexpected error. Refer to the state message for further details. 
* - * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. * - * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further - * details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when - * communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because - * it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The - * workspace has reached the quota for the maximum number of concurrent active runs. Consider - * scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it - * tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The - * number of cluster creation, start, and upsize requests have exceeded the allotted rate limit. - * Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run - * failed due to an error when accessing the customer blob storage. Refer to the state message for - * further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more - * details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to - * a permission issue while accessing a resource. Refer to the state message for further details. * + * releases. * `SUCCESS`: The run was completed successfully. * `SUCCESS_WITH_FAILURES`: The run was + * completed successfully but some child runs failed. * `USER_CANCELED`: The run was successfully + * canceled during execution by a user. * `CANCELED`: The run was canceled during execution by the + * Databricks platform; for example, if the maximum run duration was exceeded. * `SKIPPED`: Run was + * never executed, for example, if the upstream task run failed, the dependency type condition was + * not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The run encountered an + * unexpected error. Refer to the state message for further details. * `DRIVER_ERROR`: The run + * encountered an error while communicating with the Spark Driver. * `CLUSTER_ERROR`: The run failed + * due to a cluster error. Refer to the state message for further details. * + * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when communicating + * with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because it issued an + * invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached + * the quota for the maximum number of concurrent active runs. Consider scheduling the runs over a + * larger time frame. * `FEATURE_DISABLED`: The run failed because it tried to access a feature + * unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster + * creation, start, and upsize requests have exceeded the allotted rate limit. Consider spreading + * the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an + * error when accessing the customer blob storage. Refer to the state message for further details. * + * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more details, refer to the + * state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue + * while accessing a resource. Refer to the state message for further details. * * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. 
Refer * to the state message for further details. The causes might include, but are not limited to: The * provided library is invalid, there are insufficient permissions to install the library, and so @@ -95,6 +96,7 @@ public enum TerminationCodeCode { STORAGE_ACCESS_ERROR, // The run failed due to an error when accessing the customer blob storage. // Refer to the state message for further details. SUCCESS, // The run was completed successfully. + SUCCESS_WITH_FAILURES, // The run was completed successfully but some child runs failed. UNAUTHORIZED_ERROR, // The run failed due to a permission issue while accessing a resource. Refer // to // the state message for further details. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java index f3db81235..73025923e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java @@ -11,7 +11,8 @@ public class TerminationDetails { /** * The code indicates why the run was terminated. Additional codes might be introduced in future - * releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was + * releases. * `SUCCESS`: The run was completed successfully. * `SUCCESS_WITH_FAILURES`: The run + * was completed successfully but some child runs failed. * `USER_CANCELED`: The run was * successfully canceled during execution by a user. * `CANCELED`: The run was canceled during * execution by the Databricks platform; for example, if the maximum run duration was exceeded. * * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java index 8cd10563f..1471fd886 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java @@ -31,6 +31,13 @@ public class IngestionPipelineDefinition { @JsonProperty("objects") private Collection objects; + /** + * The type of the foreign source. The source type will be inferred from the source connection or + * ingestion gateway. This field is output only and will be ignored if provided. + */ + @JsonProperty("source_type") + private IngestionSourceType sourceType; + /** * Configuration settings to control the ingestion of tables. These settings are applied to all * tables in the pipeline. 
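The hunks above add `SUCCESS_WITH_FAILURES` alongside `SUCCESS` in both `TerminationCodeCode` and the `TerminationDetails` doc, so callers that treat any successful completion as terminal now have two codes to check. A minimal sketch, assuming `TerminationDetails` exposes the documented code through a `getCode()` accessor (the getter itself is not shown in this hunk):

import com.databricks.sdk.service.jobs.TerminationCodeCode;
import com.databricks.sdk.service.jobs.TerminationDetails;

class RunTerminationTriage {
  // A clean success excludes the new code, since SUCCESS_WITH_FAILURES means
  // the run finished but some child runs failed.
  static boolean finishedCleanly(TerminationDetails details) {
    return details.getCode() == TerminationCodeCode.SUCCESS;
  }

  // "Finished at all" now has to accept both success codes.
  static boolean finished(TerminationDetails details) {
    TerminationCodeCode code = details.getCode();
    return code == TerminationCodeCode.SUCCESS
        || code == TerminationCodeCode.SUCCESS_WITH_FAILURES;
  }
}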
@@ -65,6 +72,15 @@ public Collection getObjects() { return objects; } + public IngestionPipelineDefinition setSourceType(IngestionSourceType sourceType) { + this.sourceType = sourceType; + return this; + } + + public IngestionSourceType getSourceType() { + return sourceType; + } + public IngestionPipelineDefinition setTableConfiguration(TableSpecificConfig tableConfiguration) { this.tableConfiguration = tableConfiguration; return this; @@ -82,12 +98,14 @@ public boolean equals(Object o) { return Objects.equals(connectionName, that.connectionName) && Objects.equals(ingestionGatewayId, that.ingestionGatewayId) && Objects.equals(objects, that.objects) + && Objects.equals(sourceType, that.sourceType) && Objects.equals(tableConfiguration, that.tableConfiguration); } @Override public int hashCode() { - return Objects.hash(connectionName, ingestionGatewayId, objects, tableConfiguration); + return Objects.hash( + connectionName, ingestionGatewayId, objects, sourceType, tableConfiguration); } @Override @@ -96,6 +114,7 @@ public String toString() { .add("connectionName", connectionName) .add("ingestionGatewayId", ingestionGatewayId) .add("objects", objects) + .add("sourceType", sourceType) .add("tableConfiguration", tableConfiguration) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java new file mode 100755 index 000000000..c7620bc7f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java @@ -0,0 +1,21 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum IngestionSourceType { + DYNAMICS365, + GA4_RAW_DATA, + MANAGED_POSTGRESQL, + MYSQL, + NETSUITE, + ORACLE, + POSTGRESQL, + SALESFORCE, + SERVICENOW, + SHAREPOINT, + SQLSERVER, + WORKDAY_RAAS, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java new file mode 100755 index 000000000..2a764890d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
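The `source_type` field added above is documented as output-only (inferred from the connection or gateway and ignored on input), so a client only ever reads it back against the new `IngestionSourceType` enum. A small sketch, assuming the pipeline definition has already been fetched by other means:

import com.databricks.sdk.service.pipelines.IngestionPipelineDefinition;
import com.databricks.sdk.service.pipelines.IngestionSourceType;

class IngestionSourceCheck {
  // source_type is inferred server-side, so we never set it; we only inspect
  // what the service reports back.
  static boolean readsFromPostgres(IngestionPipelineDefinition definition) {
    return definition.getSourceType() == IngestionSourceType.POSTGRESQL;
  }
}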
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class PathPattern { + /** The source code to include for pipelines */ + @JsonProperty("include") + private String include; + + public PathPattern setInclude(String include) { + this.include = include; + return this; + } + + public String getInclude() { + return include; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PathPattern that = (PathPattern) o; + return Objects.equals(include, that.include); + } + + @Override + public int hashCode() { + return Objects.hash(include); + } + + @Override + public String toString() { + return new ToStringer(PathPattern.class).add("include", include).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java index 7954e8735..45f63133b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java @@ -13,6 +13,13 @@ public class PipelineLibrary { @JsonProperty("file") private FileLibrary file; + /** + * The unified field to include source codes. Each entry can be a notebook path, a file path, or a + * folder path that ends `/**`. This field cannot be used together with `notebook` or `file`. + */ + @JsonProperty("glob") + private PathPattern glob; + /** URI of the jar to be installed. Currently only DBFS is supported. 
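The new `glob` field and the `PathPattern` type above combine into a single builder chain. A minimal sketch; the workspace folder is a hypothetical example, and per the doc comment a folder pattern must end in `/**` and cannot be combined with `notebook` or `file`:

import com.databricks.sdk.service.pipelines.PathPattern;
import com.databricks.sdk.service.pipelines.PipelineLibrary;

class GlobLibrary {
  // e.g. folder = "/Workspace/Repos/my-project/src" (hypothetical path)
  static PipelineLibrary allSourcesUnder(String folder) {
    return new PipelineLibrary().setGlob(new PathPattern().setInclude(folder + "/**"));
  }
}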
*/ @JsonProperty("jar") private String jar; @@ -38,6 +45,15 @@ public FileLibrary getFile() { return file; } + public PipelineLibrary setGlob(PathPattern glob) { + this.glob = glob; + return this; + } + + public PathPattern getGlob() { + return glob; + } + public PipelineLibrary setJar(String jar) { this.jar = jar; return this; @@ -80,6 +96,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; PipelineLibrary that = (PipelineLibrary) o; return Objects.equals(file, that.file) + && Objects.equals(glob, that.glob) && Objects.equals(jar, that.jar) && Objects.equals(maven, that.maven) && Objects.equals(notebook, that.notebook) @@ -88,13 +105,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(file, jar, maven, notebook, whl); + return Objects.hash(file, glob, jar, maven, notebook, whl); } @Override public String toString() { return new ToStringer(PipelineLibrary.class) .add("file", file) + .add("glob", glob) .add("jar", jar) .add("maven", maven) .add("notebook", notebook) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateCause.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateCause.java index de125d3d2..72a8b29d1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateCause.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateCause.java @@ -8,6 +8,7 @@ @Generated public enum StartUpdateCause { API_CALL, + INFRASTRUCTURE_MAINTENANCE, JOB_TASK, RETRY_ON_FAILURE, SCHEMA_CHANGE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoCause.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoCause.java index 5369cd532..74b0de357 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoCause.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoCause.java @@ -8,6 +8,7 @@ @Generated public enum UpdateInfoCause { API_CALL, + INFRASTRUCTURE_MAINTENANCE, JOB_TASK, RETRY_ON_FAILURE, SCHEMA_CHANGE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequest.java new file mode 100755 index 000000000..e7ae83feb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreatePtEndpointRequest.java @@ -0,0 +1,108 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class CreatePtEndpointRequest { + /** The AI Gateway configuration for the serving endpoint. */ + @JsonProperty("ai_gateway") + private AiGatewayConfig aiGateway; + + /** The budget policy associated with the endpoint. */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + + /** The core config of the serving endpoint. */ + @JsonProperty("config") + private PtEndpointCoreConfig config; + + /** + * The name of the serving endpoint. This field is required and must be unique across a Databricks + * workspace. 
An endpoint name can consist of alphanumeric characters, dashes, and underscores. + */ + @JsonProperty("name") + private String name; + + /** Tags to be attached to the serving endpoint and automatically propagated to billing logs. */ + @JsonProperty("tags") + private Collection tags; + + public CreatePtEndpointRequest setAiGateway(AiGatewayConfig aiGateway) { + this.aiGateway = aiGateway; + return this; + } + + public AiGatewayConfig getAiGateway() { + return aiGateway; + } + + public CreatePtEndpointRequest setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + + public CreatePtEndpointRequest setConfig(PtEndpointCoreConfig config) { + this.config = config; + return this; + } + + public PtEndpointCoreConfig getConfig() { + return config; + } + + public CreatePtEndpointRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreatePtEndpointRequest setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreatePtEndpointRequest that = (CreatePtEndpointRequest) o; + return Objects.equals(aiGateway, that.aiGateway) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(config, that.config) + && Objects.equals(name, that.name) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(aiGateway, budgetPolicyId, config, name, tags); + } + + @Override + public String toString() { + return new ToStringer(CreatePtEndpointRequest.class) + .add("aiGateway", aiGateway) + .add("budgetPolicyId", budgetPolicyId) + .add("config", config) + .add("name", name) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfig.java new file mode 100755 index 000000000..d0a1c2c0c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtEndpointCoreConfig.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class PtEndpointCoreConfig { + /** The list of served entities under the serving endpoint config. 
*/ + @JsonProperty("served_entities") + private Collection servedEntities; + + /** */ + @JsonProperty("traffic_config") + private TrafficConfig trafficConfig; + + public PtEndpointCoreConfig setServedEntities(Collection servedEntities) { + this.servedEntities = servedEntities; + return this; + } + + public Collection getServedEntities() { + return servedEntities; + } + + public PtEndpointCoreConfig setTrafficConfig(TrafficConfig trafficConfig) { + this.trafficConfig = trafficConfig; + return this; + } + + public TrafficConfig getTrafficConfig() { + return trafficConfig; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PtEndpointCoreConfig that = (PtEndpointCoreConfig) o; + return Objects.equals(servedEntities, that.servedEntities) + && Objects.equals(trafficConfig, that.trafficConfig); + } + + @Override + public int hashCode() { + return Objects.hash(servedEntities, trafficConfig); + } + + @Override + public String toString() { + return new ToStringer(PtEndpointCoreConfig.class) + .add("servedEntities", servedEntities) + .add("trafficConfig", trafficConfig) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModel.java new file mode 100755 index 000000000..a785267c9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PtServedModel.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class PtServedModel { + /** + * The name of the entity to be served. The entity may be a model in the Databricks Model + * Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If + * it is a UC object, the full name of the object should be given in the form of + * **catalog_name.schema_name.model_name**. + */ + @JsonProperty("entity_name") + private String entityName; + + /** */ + @JsonProperty("entity_version") + private String entityVersion; + + /** + * The name of a served entity. It must be unique across an endpoint. A served entity name can + * consist of alphanumeric characters, dashes, and underscores. If not specified for an external + * model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if + * not specified for other entities, it defaults to entity_name-entity_version. + */ + @JsonProperty("name") + private String name; + + /** The number of model units to be provisioned. 
*/ + @JsonProperty("provisioned_model_units") + private Long provisionedModelUnits; + + public PtServedModel setEntityName(String entityName) { + this.entityName = entityName; + return this; + } + + public String getEntityName() { + return entityName; + } + + public PtServedModel setEntityVersion(String entityVersion) { + this.entityVersion = entityVersion; + return this; + } + + public String getEntityVersion() { + return entityVersion; + } + + public PtServedModel setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PtServedModel setProvisionedModelUnits(Long provisionedModelUnits) { + this.provisionedModelUnits = provisionedModelUnits; + return this; + } + + public Long getProvisionedModelUnits() { + return provisionedModelUnits; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PtServedModel that = (PtServedModel) o; + return Objects.equals(entityName, that.entityName) + && Objects.equals(entityVersion, that.entityVersion) + && Objects.equals(name, that.name) + && Objects.equals(provisionedModelUnits, that.provisionedModelUnits); + } + + @Override + public int hashCode() { + return Objects.hash(entityName, entityVersion, name, provisionedModelUnits); + } + + @Override + public String toString() { + return new ToStringer(PtServedModel.class) + .add("entityName", entityName) + .add("entityVersion", entityVersion) + .add("name", name) + .add("provisionedModelUnits", provisionedModelUnits) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java index e323cbca0..9e9593df2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java @@ -65,6 +65,10 @@ public class ServedEntityInput { @JsonProperty("name") private String name; + /** The number of model units provisioned. */ + @JsonProperty("provisioned_model_units") + private Long provisionedModelUnits; + /** Whether the compute resources for the served entity should scale down to zero. 
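Taken together, `CreatePtEndpointRequest`, `PtEndpointCoreConfig`, and `PtServedModel` describe one provisioned-throughput endpoint. A sketch of the request wiring; the catalog, model, and endpoint names are hypothetical:

import com.databricks.sdk.service.serving.CreatePtEndpointRequest;
import com.databricks.sdk.service.serving.PtEndpointCoreConfig;
import com.databricks.sdk.service.serving.PtServedModel;
import java.util.Collections;

class PtRequestFactory {
  static CreatePtEndpointRequest smallEndpoint() {
    PtServedModel servedModel =
        new PtServedModel()
            .setEntityName("main.default.my_model") // hypothetical UC model
            .setEntityVersion("1")
            .setProvisionedModelUnits(2L);
    return new CreatePtEndpointRequest()
        .setName("my-pt-endpoint") // hypothetical; must be unique in the workspace
        .setConfig(
            new PtEndpointCoreConfig().setServedEntities(Collections.singletonList(servedModel)));
  }
}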
*/ @JsonProperty("scale_to_zero_enabled") private Boolean scaleToZeroEnabled; @@ -165,6 +169,15 @@ public String getName() { return name; } + public ServedEntityInput setProvisionedModelUnits(Long provisionedModelUnits) { + this.provisionedModelUnits = provisionedModelUnits; + return this; + } + + public Long getProvisionedModelUnits() { + return provisionedModelUnits; + } + public ServedEntityInput setScaleToZeroEnabled(Boolean scaleToZeroEnabled) { this.scaleToZeroEnabled = scaleToZeroEnabled; return this; @@ -205,6 +218,7 @@ public boolean equals(Object o) { && Objects.equals(maxProvisionedThroughput, that.maxProvisionedThroughput) && Objects.equals(minProvisionedThroughput, that.minProvisionedThroughput) && Objects.equals(name, that.name) + && Objects.equals(provisionedModelUnits, that.provisionedModelUnits) && Objects.equals(scaleToZeroEnabled, that.scaleToZeroEnabled) && Objects.equals(workloadSize, that.workloadSize) && Objects.equals(workloadType, that.workloadType); @@ -221,6 +235,7 @@ public int hashCode() { maxProvisionedThroughput, minProvisionedThroughput, name, + provisionedModelUnits, scaleToZeroEnabled, workloadSize, workloadType); @@ -237,6 +252,7 @@ public String toString() { .add("maxProvisionedThroughput", maxProvisionedThroughput) .add("minProvisionedThroughput", minProvisionedThroughput) .add("name", name) + .add("provisionedModelUnits", provisionedModelUnits) .add("scaleToZeroEnabled", scaleToZeroEnabled) .add("workloadSize", workloadSize) .add("workloadType", workloadType) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java index 9c656687b..74b58f742 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java @@ -80,6 +80,10 @@ public class ServedEntityOutput { @JsonProperty("name") private String name; + /** The number of model units provisioned. */ + @JsonProperty("provisioned_model_units") + private Long provisionedModelUnits; + /** Whether the compute resources for the served entity should scale down to zero. 
*/ @JsonProperty("scale_to_zero_enabled") private Boolean scaleToZeroEnabled; @@ -211,6 +215,15 @@ public String getName() { return name; } + public ServedEntityOutput setProvisionedModelUnits(Long provisionedModelUnits) { + this.provisionedModelUnits = provisionedModelUnits; + return this; + } + + public Long getProvisionedModelUnits() { + return provisionedModelUnits; + } + public ServedEntityOutput setScaleToZeroEnabled(Boolean scaleToZeroEnabled) { this.scaleToZeroEnabled = scaleToZeroEnabled; return this; @@ -263,6 +276,7 @@ public boolean equals(Object o) { && Objects.equals(maxProvisionedThroughput, that.maxProvisionedThroughput) && Objects.equals(minProvisionedThroughput, that.minProvisionedThroughput) && Objects.equals(name, that.name) + && Objects.equals(provisionedModelUnits, that.provisionedModelUnits) && Objects.equals(scaleToZeroEnabled, that.scaleToZeroEnabled) && Objects.equals(state, that.state) && Objects.equals(workloadSize, that.workloadSize) @@ -283,6 +297,7 @@ public int hashCode() { maxProvisionedThroughput, minProvisionedThroughput, name, + provisionedModelUnits, scaleToZeroEnabled, state, workloadSize, @@ -303,6 +318,7 @@ public String toString() { .add("maxProvisionedThroughput", maxProvisionedThroughput) .add("minProvisionedThroughput", minProvisionedThroughput) .add("name", name) + .add("provisionedModelUnits", provisionedModelUnits) .add("scaleToZeroEnabled", scaleToZeroEnabled) .add("state", state) .add("workloadSize", workloadSize) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java index f03eec3a4..907d88d17 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java @@ -48,6 +48,10 @@ public class ServedModelInput { @JsonProperty("name") private String name; + /** The number of model units provisioned. */ + @JsonProperty("provisioned_model_units") + private Long provisionedModelUnits; + /** Whether the compute resources for the served entity should scale down to zero. 
*/ @JsonProperty("scale_to_zero_enabled") private Boolean scaleToZeroEnabled; @@ -139,6 +143,15 @@ public String getName() { return name; } + public ServedModelInput setProvisionedModelUnits(Long provisionedModelUnits) { + this.provisionedModelUnits = provisionedModelUnits; + return this; + } + + public Long getProvisionedModelUnits() { + return provisionedModelUnits; + } + public ServedModelInput setScaleToZeroEnabled(Boolean scaleToZeroEnabled) { this.scaleToZeroEnabled = scaleToZeroEnabled; return this; @@ -178,6 +191,7 @@ public boolean equals(Object o) { && Objects.equals(modelName, that.modelName) && Objects.equals(modelVersion, that.modelVersion) && Objects.equals(name, that.name) + && Objects.equals(provisionedModelUnits, that.provisionedModelUnits) && Objects.equals(scaleToZeroEnabled, that.scaleToZeroEnabled) && Objects.equals(workloadSize, that.workloadSize) && Objects.equals(workloadType, that.workloadType); @@ -193,6 +207,7 @@ public int hashCode() { modelName, modelVersion, name, + provisionedModelUnits, scaleToZeroEnabled, workloadSize, workloadType); @@ -208,6 +223,7 @@ public String toString() { .add("modelName", modelName) .add("modelVersion", modelVersion) .add("name", name) + .add("provisionedModelUnits", provisionedModelUnits) .add("scaleToZeroEnabled", scaleToZeroEnabled) .add("workloadSize", workloadSize) .add("workloadType", workloadType) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java index a170e70bc..eabfc4a48 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java @@ -48,6 +48,10 @@ public class ServedModelOutput { @JsonProperty("name") private String name; + /** The number of model units provisioned. */ + @JsonProperty("provisioned_model_units") + private Long provisionedModelUnits; + /** Whether the compute resources for the served entity should scale down to zero. 
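`provisioned_model_units` is added uniformly to the served-entity and served-model input/output shapes, so the same sizing knob is available on the classic serving config too. A sketch on `ServedModelInput`; the model name and version are hypothetical:

import com.databricks.sdk.service.serving.ServedModelInput;

class ModelUnitsSizing {
  // Sizes the served model by provisioned model units instead of a workload size.
  static ServedModelInput sizedByModelUnits() {
    return new ServedModelInput()
        .setModelName("my_model") // hypothetical registry model
        .setModelVersion("1")
        .setProvisionedModelUnits(2L)
        .setScaleToZeroEnabled(false);
  }
}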
*/ @JsonProperty("scale_to_zero_enabled") private Boolean scaleToZeroEnabled; @@ -143,6 +147,15 @@ public String getName() { return name; } + public ServedModelOutput setProvisionedModelUnits(Long provisionedModelUnits) { + this.provisionedModelUnits = provisionedModelUnits; + return this; + } + + public Long getProvisionedModelUnits() { + return provisionedModelUnits; + } + public ServedModelOutput setScaleToZeroEnabled(Boolean scaleToZeroEnabled) { this.scaleToZeroEnabled = scaleToZeroEnabled; return this; @@ -191,6 +204,7 @@ public boolean equals(Object o) { && Objects.equals(modelName, that.modelName) && Objects.equals(modelVersion, that.modelVersion) && Objects.equals(name, that.name) + && Objects.equals(provisionedModelUnits, that.provisionedModelUnits) && Objects.equals(scaleToZeroEnabled, that.scaleToZeroEnabled) && Objects.equals(state, that.state) && Objects.equals(workloadSize, that.workloadSize) @@ -207,6 +221,7 @@ public int hashCode() { modelName, modelVersion, name, + provisionedModelUnits, scaleToZeroEnabled, state, workloadSize, @@ -223,6 +238,7 @@ public String toString() { .add("modelName", modelName) .add("modelVersion", modelVersion) .add("name", name) + .add("provisionedModelUnits", provisionedModelUnits) .add("scaleToZeroEnabled", scaleToZeroEnabled) .add("state", state) .add("workloadSize", workloadSize) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java index f044a180f..0380b75cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java @@ -118,6 +118,22 @@ public Wait create( response); } + public Wait createProvisionedThroughputEndpoint( + String name, PtEndpointCoreConfig config) { + return createProvisionedThroughputEndpoint( + new CreatePtEndpointRequest().setName(name).setConfig(config)); + } + + /** Create a new PT serving endpoint. */ + public Wait createProvisionedThroughputEndpoint( + CreatePtEndpointRequest request) { + ServingEndpointDetailed response = impl.createProvisionedThroughputEndpoint(request); + return new Wait<>( + (timeout, callback) -> + waitGetServingEndpointNotUpdating(response.getName(), timeout, callback), + response); + } + public void delete(String name) { delete(new DeleteServingEndpointRequest().setName(name)); } @@ -332,6 +348,29 @@ public ServingEndpointPermissions updatePermissions(ServingEndpointPermissionsRe return impl.updatePermissions(request); } + public Wait + updateProvisionedThroughputEndpointConfig(String name, PtEndpointCoreConfig config) { + return updateProvisionedThroughputEndpointConfig( + new UpdateProvisionedThroughputEndpointConfigRequest().setName(name).setConfig(config)); + } + + /** + * Update config of a PT serving endpoint. + * + *

Updates any combination of the pt endpoint's served entities, the compute configuration of
+ * those served entities, and the endpoint's traffic config. Updates are applied
+ * instantaneously, and the endpoint reflects the new configuration immediately.
+ */
+ public Wait
+ updateProvisionedThroughputEndpointConfig(
+ UpdateProvisionedThroughputEndpointConfigRequest request) {
+ ServingEndpointDetailed response = impl.updateProvisionedThroughputEndpointConfig(request);
+ return new Wait<>(
+ (timeout, callback) ->
+ waitGetServingEndpointNotUpdating(response.getName(), timeout, callback),
+ response);
+ }
+
 public ServingEndpointsService impl() {
 return impl;
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
index 33b47d710..a6506b9dc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
@@ -46,6 +46,21 @@ public ServingEndpointDetailed create(CreateServingEndpoint request) {
 }
 }
+ @Override
+ public ServingEndpointDetailed createProvisionedThroughputEndpoint(
+ CreatePtEndpointRequest request) {
+ String path = "/api/2.0/serving-endpoints/pt";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, ServingEndpointDetailed.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
 @Override
 public void delete(DeleteServingEndpointRequest request) {
 String path = String.format("/api/2.0/serving-endpoints/%s", request.getName());
@@ -269,4 +284,19 @@ public ServingEndpointPermissions updatePermissions(ServingEndpointPermissionsRe
 throw new DatabricksException("IO error: " + e.getMessage(), e);
 }
 }
+
+ @Override
+ public ServingEndpointDetailed updateProvisionedThroughputEndpointConfig(
+ UpdateProvisionedThroughputEndpointConfigRequest request) {
+ String path = String.format("/api/2.0/serving-endpoints/pt/%s/config", request.getName());
+ try {
+ Request req = new Request("PUT", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, ServingEndpointDetailed.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java
index 42ee3b314..fe996b208 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java
@@ -31,6 +31,10 @@ public interface ServingEndpointsService {
 /** Create a new serving endpoint. */
 ServingEndpointDetailed create(CreateServingEndpoint createServingEndpoint);
+ /** Create a new PT serving endpoint. */
+ ServingEndpointDetailed createProvisionedThroughputEndpoint(
+ CreatePtEndpointRequest createPtEndpointRequest);
+
 /** Delete a serving endpoint.
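End to end, the two new methods pair naturally: create the PT endpoint, then PUT a new config at `/api/2.0/serving-endpoints/pt/{name}/config` as the impl above shows. A sketch via `WorkspaceClient`, assuming the returned `Wait` is resolved with its blocking `get()`:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.serving.PtEndpointCoreConfig;
import com.databricks.sdk.service.serving.ServingEndpointDetailed;

class PtEndpointLifecycle {
  static void createThenReconfigure(
      WorkspaceClient w, PtEndpointCoreConfig initial, PtEndpointCoreConfig updated)
      throws Exception {
    // Both calls return a Wait that polls waitGetServingEndpointNotUpdating.
    ServingEndpointDetailed endpoint =
        w.servingEndpoints().createProvisionedThroughputEndpoint("my-pt-endpoint", initial).get();
    w.servingEndpoints()
        .updateProvisionedThroughputEndpointConfig(endpoint.getName(), updated)
        .get();
  }
}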
*/ void delete(DeleteServingEndpointRequest deleteServingEndpointRequest); @@ -140,4 +144,15 @@ ServingEndpointPermissions setPermissions( */ ServingEndpointPermissions updatePermissions( ServingEndpointPermissionsRequest servingEndpointPermissionsRequest); + + /** + * Update config of a PT serving endpoint. + * + *

Updates any combination of the pt endpoint's served entities, the compute configuration of
+ * those served entities, and the endpoint's traffic config. Updates are applied
+ * instantaneously, and the endpoint reflects the new configuration immediately.
+ */
+ ServingEndpointDetailed updateProvisionedThroughputEndpointConfig(
+ UpdateProvisionedThroughputEndpointConfigRequest
+ updateProvisionedThroughputEndpointConfigRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java
new file mode 100755
index 000000000..997aa5fbb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/UpdateProvisionedThroughputEndpointConfigRequest.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.serving;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateProvisionedThroughputEndpointConfigRequest {
+ /** */
+ @JsonProperty("config")
+ private PtEndpointCoreConfig config;
+
+ /** The name of the pt endpoint to update. This field is required. */
+ @JsonIgnore private String name;
+
+ public UpdateProvisionedThroughputEndpointConfigRequest setConfig(PtEndpointCoreConfig config) {
+ this.config = config;
+ return this;
+ }
+
+ public PtEndpointCoreConfig getConfig() {
+ return config;
+ }
+
+ public UpdateProvisionedThroughputEndpointConfigRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateProvisionedThroughputEndpointConfigRequest that =
+ (UpdateProvisionedThroughputEndpointConfigRequest) o;
+ return Objects.equals(config, that.config) && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(config, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateProvisionedThroughputEndpointConfigRequest.class)
+ .add("config", config)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
index 921d60bee..b343a30ed 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
@@ -21,6 +21,10 @@ public class AccountSettingsAPI {
 private EsmEnablementAccountAPI esmEnablementAccountAPI;
+ private LlmProxyPartnerPoweredAccountAPI llmProxyPartnerPoweredAccountAPI;
+
+ private LlmProxyPartnerPoweredEnforceAPI llmProxyPartnerPoweredEnforceAPI;
+
 private PersonalComputeAPI personalComputeAPI;
 /** Regular-use constructor */
@@ -35,6 +39,10 @@ public AccountSettingsAPI(ApiClient apiClient) {
 esmEnablementAccountAPI = new EsmEnablementAccountAPI(apiClient);
+ llmProxyPartnerPoweredAccountAPI = new LlmProxyPartnerPoweredAccountAPI(apiClient);
+
+ llmProxyPartnerPoweredEnforceAPI = new
LlmProxyPartnerPoweredEnforceAPI(apiClient); + personalComputeAPI = new PersonalComputeAPI(apiClient); } @@ -69,6 +77,19 @@ public EsmEnablementAccountAPI EsmEnablementAccount() { return esmEnablementAccountAPI; } + /** Determines if partner powered models are enabled or not for a specific account. */ + public LlmProxyPartnerPoweredAccountAPI LlmProxyPartnerPoweredAccount() { + return llmProxyPartnerPoweredAccountAPI; + } + + /** + * Determines if the account-level partner-powered setting value is enforced upon the + * workspace-level partner-powered setting. + */ + public LlmProxyPartnerPoweredEnforceAPI LlmProxyPartnerPoweredEnforce() { + return llmProxyPartnerPoweredEnforceAPI; + } + /** * The Personal Compute enablement setting lets you control which users can use the Personal * Compute default policy to create compute resources. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java new file mode 100755 index 000000000..b746ad732 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceRequest.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete the enable partner powered AI features workspace setting */ +@Generated +public class DeleteLlmProxyPartnerPoweredWorkspaceRequest { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. 
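The etag doc above prescribes a read -> delete pattern. A minimal sketch of the request side; the workspace-level API object that executes the GET and DELETE is not part of this hunk, so only the etag hand-off is shown:

import com.databricks.sdk.service.settings.DeleteLlmProxyPartnerPoweredWorkspaceRequest;

class EtagDeletePattern {
  // etagFromGet is the etag returned by a prior GET of this setting; echoing it
  // back lets the server reject the DELETE if the setting changed in between.
  static DeleteLlmProxyPartnerPoweredWorkspaceRequest deleteRequest(String etagFromGet) {
    return new DeleteLlmProxyPartnerPoweredWorkspaceRequest().setEtag(etagFromGet);
  }
}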
+ */ + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteLlmProxyPartnerPoweredWorkspaceRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteLlmProxyPartnerPoweredWorkspaceRequest that = + (DeleteLlmProxyPartnerPoweredWorkspaceRequest) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteLlmProxyPartnerPoweredWorkspaceRequest.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java new file mode 100755 index 000000000..10524ea66 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. */ +@Generated +public class DeleteLlmProxyPartnerPoweredWorkspaceResponse { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. + */ + @JsonProperty("etag") + private String etag; + + public DeleteLlmProxyPartnerPoweredWorkspaceResponse setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteLlmProxyPartnerPoweredWorkspaceResponse that = + (DeleteLlmProxyPartnerPoweredWorkspaceResponse) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteLlmProxyPartnerPoweredWorkspaceResponse.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java new file mode 100755 index 000000000..d5fd149f4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredAccountRequest.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the enable partner powered AI features account setting */ +@Generated +public class GetLlmProxyPartnerPoweredAccountRequest { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. + */ + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetLlmProxyPartnerPoweredAccountRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLlmProxyPartnerPoweredAccountRequest that = (GetLlmProxyPartnerPoweredAccountRequest) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetLlmProxyPartnerPoweredAccountRequest.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java new file mode 100755 index 000000000..63e690981 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the enforcement status of partner powered AI features account setting */ +@Generated +public class GetLlmProxyPartnerPoweredEnforceRequest { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. 
+ */ + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetLlmProxyPartnerPoweredEnforceRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLlmProxyPartnerPoweredEnforceRequest that = (GetLlmProxyPartnerPoweredEnforceRequest) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetLlmProxyPartnerPoweredEnforceRequest.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java new file mode 100755 index 000000000..b149178f4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the enable partner powered AI features workspace setting */ +@Generated +public class GetLlmProxyPartnerPoweredWorkspaceRequest { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. + */ + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetLlmProxyPartnerPoweredWorkspaceRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLlmProxyPartnerPoweredWorkspaceRequest that = (GetLlmProxyPartnerPoweredWorkspaceRequest) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetLlmProxyPartnerPoweredWorkspaceRequest.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java new file mode 100755 index 000000000..adb284ade --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccount.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class LlmProxyPartnerPoweredAccount { + /** */ + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + * etag from a GET request, and pass it with the PATCH request to identify the setting version you + * are updating. + */ + @JsonProperty("etag") + private String etag; + + /** + * Name of the corresponding setting. This field is populated in the response, but it will not be + * respected even if it's set in the request body. The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. + */ + @JsonProperty("setting_name") + private String settingName; + + public LlmProxyPartnerPoweredAccount setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public LlmProxyPartnerPoweredAccount setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public LlmProxyPartnerPoweredAccount setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LlmProxyPartnerPoweredAccount that = (LlmProxyPartnerPoweredAccount) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(LlmProxyPartnerPoweredAccount.class) + .add("booleanVal", booleanVal) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java new file mode 100755 index 000000000..c5b67f821 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountAPI.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
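A payload sketch for the account-level toggle defined above, assuming the existing `settings.BooleanMessage` type (referenced but not defined in this diff) exposes a `setValue(Boolean)` builder:

import com.databricks.sdk.service.settings.BooleanMessage;
import com.databricks.sdk.service.settings.LlmProxyPartnerPoweredAccount;

class PartnerPoweredAccountPayload {
  static LlmProxyPartnerPoweredAccount enable() {
    return new LlmProxyPartnerPoweredAccount()
        .setBooleanVal(new BooleanMessage().setValue(true))
        // Per the doc comment, single-instance settings use the name "default".
        .setSettingName("default");
  }
}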
+package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Determines if partner powered models are enabled or not for a specific account */ +@Generated +public class LlmProxyPartnerPoweredAccountAPI { + private static final Logger LOG = LoggerFactory.getLogger(LlmProxyPartnerPoweredAccountAPI.class); + + private final LlmProxyPartnerPoweredAccountService impl; + + /** Regular-use constructor */ + public LlmProxyPartnerPoweredAccountAPI(ApiClient apiClient) { + impl = new LlmProxyPartnerPoweredAccountImpl(apiClient); + } + + /** Constructor for mocks */ + public LlmProxyPartnerPoweredAccountAPI(LlmProxyPartnerPoweredAccountService mock) { + impl = mock; + } + + /** + * Get the enable partner powered AI features account setting. + * + *

Gets the enable partner powered AI features account setting. + */ + public LlmProxyPartnerPoweredAccount get(GetLlmProxyPartnerPoweredAccountRequest request) { + return impl.get(request); + } + + public LlmProxyPartnerPoweredAccount update( + boolean allowMissing, LlmProxyPartnerPoweredAccount setting, String fieldMask) { + return update( + new UpdateLlmProxyPartnerPoweredAccountRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the enable partner powered AI features account setting. + * + *

Updates the enable partner powered AI features account setting. + */ + public LlmProxyPartnerPoweredAccount update(UpdateLlmProxyPartnerPoweredAccountRequest request) { + return impl.update(request); + } + + public LlmProxyPartnerPoweredAccountService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java new file mode 100755 index 000000000..ab253c810 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountImpl.java @@ -0,0 +1,51 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of LlmProxyPartnerPoweredAccount */ +@Generated +class LlmProxyPartnerPoweredAccountImpl implements LlmProxyPartnerPoweredAccountService { + private final ApiClient apiClient; + + public LlmProxyPartnerPoweredAccountImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public LlmProxyPartnerPoweredAccount get(GetLlmProxyPartnerPoweredAccountRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/settings/types/llm_proxy_partner_powered/names/default", + apiClient.configuredAccountID()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, LlmProxyPartnerPoweredAccount.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public LlmProxyPartnerPoweredAccount update(UpdateLlmProxyPartnerPoweredAccountRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/settings/types/llm_proxy_partner_powered/names/default", + apiClient.configuredAccountID()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, LlmProxyPartnerPoweredAccount.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountService.java new file mode 100755 index 000000000..075f5e07f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredAccountService.java @@ -0,0 +1,30 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Determines if partner powered models are enabled or not for a specific account + * + *

This is the high-level interface that contains generated methods.
+ *
+ *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface LlmProxyPartnerPoweredAccountService { + /** + * Get the enable partner powered AI features account setting. + * + *
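Through the new `LlmProxyPartnerPoweredAccount()` accessor on `AccountSettingsAPI`, the update is a one-liner. A sketch; the `"boolean_val"` field mask is an assumption derived from the JSON property name, not something this diff spells out:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.LlmProxyPartnerPoweredAccount;

class PartnerPoweredAccountUpdate {
  static LlmProxyPartnerPoweredAccount apply(
      AccountClient a, LlmProxyPartnerPoweredAccount setting) {
    // PATCHes the "default" setting instance, per the generated impl above.
    return a.settings()
        .LlmProxyPartnerPoweredAccount()
        .update(/* allowMissing */ true, setting, /* fieldMask */ "boolean_val");
  }
}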

Gets the enable partner powered AI features account setting. + */ + LlmProxyPartnerPoweredAccount get( + GetLlmProxyPartnerPoweredAccountRequest getLlmProxyPartnerPoweredAccountRequest); + + /** + * Update the enable partner powered AI features account setting. + * + *

Updates the enable partner powered AI features account setting. + */ + LlmProxyPartnerPoweredAccount update( + UpdateLlmProxyPartnerPoweredAccountRequest updateLlmProxyPartnerPoweredAccountRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java new file mode 100755 index 000000000..653a3ddd6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforce.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class LlmProxyPartnerPoweredEnforce { + /** */ + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + * etag from a GET request, and pass it with the PATCH request to identify the setting version you + * are updating. + */ + @JsonProperty("etag") + private String etag; + + /** + * Name of the corresponding setting. This field is populated in the response, but it will not be + * respected even if it's set in the request body. The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. 
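Reading the enforcement flag mirrors the account setting. A sketch, assuming `BooleanMessage#getValue()` exists to unwrap the boolean:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.GetLlmProxyPartnerPoweredEnforceRequest;
import com.databricks.sdk.service.settings.LlmProxyPartnerPoweredEnforce;

class EnforcementCheck {
  static boolean isEnforced(AccountClient a) {
    LlmProxyPartnerPoweredEnforce setting =
        a.settings()
            .LlmProxyPartnerPoweredEnforce()
            .get(new GetLlmProxyPartnerPoweredEnforceRequest());
    // Null-safe unwrap of the boolean_val message.
    return setting.getBooleanVal() != null
        && Boolean.TRUE.equals(setting.getBooleanVal().getValue());
  }
}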
+ */ + @JsonProperty("setting_name") + private String settingName; + + public LlmProxyPartnerPoweredEnforce setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public LlmProxyPartnerPoweredEnforce setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public LlmProxyPartnerPoweredEnforce setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LlmProxyPartnerPoweredEnforce that = (LlmProxyPartnerPoweredEnforce) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(LlmProxyPartnerPoweredEnforce.class) + .add("booleanVal", booleanVal) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java new file mode 100755 index 000000000..a706606fc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceAPI.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Determines if the account-level partner-powered setting value is enforced upon the + * workspace-level partner-powered setting + */ +@Generated +public class LlmProxyPartnerPoweredEnforceAPI { + private static final Logger LOG = LoggerFactory.getLogger(LlmProxyPartnerPoweredEnforceAPI.class); + + private final LlmProxyPartnerPoweredEnforceService impl; + + /** Regular-use constructor */ + public LlmProxyPartnerPoweredEnforceAPI(ApiClient apiClient) { + impl = new LlmProxyPartnerPoweredEnforceImpl(apiClient); + } + + /** Constructor for mocks */ + public LlmProxyPartnerPoweredEnforceAPI(LlmProxyPartnerPoweredEnforceService mock) { + impl = mock; + } + + /** + * Get the enforcement status of partner powered AI features account setting. + * + *
Gets the enforcement status of partner powered AI features account setting. + */ + public LlmProxyPartnerPoweredEnforce get(GetLlmProxyPartnerPoweredEnforceRequest request) { + return impl.get(request); + } + + public LlmProxyPartnerPoweredEnforce update( + boolean allowMissing, LlmProxyPartnerPoweredEnforce setting, String fieldMask) { + return update( + new UpdateLlmProxyPartnerPoweredEnforceRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the enforcement status of partner powered AI features account setting. + * + *
Updates the enforcement status of partner powered AI features account setting. + */ + public LlmProxyPartnerPoweredEnforce update(UpdateLlmProxyPartnerPoweredEnforceRequest request) { + return impl.update(request); + } + + public LlmProxyPartnerPoweredEnforceService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java new file mode 100755 index 000000000..8ffdaee6c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceImpl.java @@ -0,0 +1,51 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of LlmProxyPartnerPoweredEnforce */ +@Generated +class LlmProxyPartnerPoweredEnforceImpl implements LlmProxyPartnerPoweredEnforceService { + private final ApiClient apiClient; + + public LlmProxyPartnerPoweredEnforceImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public LlmProxyPartnerPoweredEnforce get(GetLlmProxyPartnerPoweredEnforceRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/settings/types/llm_proxy_partner_powered_enforce/names/default", + apiClient.configuredAccountID()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, LlmProxyPartnerPoweredEnforce.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public LlmProxyPartnerPoweredEnforce update(UpdateLlmProxyPartnerPoweredEnforceRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/settings/types/llm_proxy_partner_powered_enforce/names/default", + apiClient.configuredAccountID()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, LlmProxyPartnerPoweredEnforce.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceService.java new file mode 100755 index 000000000..dcad9f4fc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredEnforceService.java @@ -0,0 +1,31 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Determines if the account-level partner-powered setting value is enforced upon the + * workspace-level partner-powered setting + * + *
This is the high-level interface, which contains generated methods. + * + *
Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface LlmProxyPartnerPoweredEnforceService { + /** + * Get the enforcement status of partner powered AI features account setting. + * + *
Gets the enforcement status of partner powered AI features account setting. + */ + LlmProxyPartnerPoweredEnforce get( + GetLlmProxyPartnerPoweredEnforceRequest getLlmProxyPartnerPoweredEnforceRequest); + + /** + * Update the enforcement status of partner powered AI features account setting. + * + *
Updates the enforcement status of partner powered AI features account setting. + */ + LlmProxyPartnerPoweredEnforce update( + UpdateLlmProxyPartnerPoweredEnforceRequest updateLlmProxyPartnerPoweredEnforceRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java new file mode 100755 index 000000000..85cd8a171 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspace.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class LlmProxyPartnerPoweredWorkspace { + /** */ + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + * etag from a GET request, and pass it with the PATCH request to identify the setting version you + * are updating. + */ + @JsonProperty("etag") + private String etag; + + /** + * Name of the corresponding setting. This field is populated in the response, but it will not be + * respected even if it's set in the request body. The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace.
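Before the workspace-level classes, a quick sketch of flipping the enforce flag with the three-argument update overload added in LlmProxyPartnerPoweredEnforceAPI above. This is hypothetical usage: AccountClient.apiClient(), BooleanMessage.setValue, and the mask path are assumptions not confirmed by this diff.

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.BooleanMessage;
import com.databricks.sdk.service.settings.LlmProxyPartnerPoweredEnforce;
import com.databricks.sdk.service.settings.LlmProxyPartnerPoweredEnforceAPI;

public class LlmProxyEnforceExample {
  public static void main(String[] args) {
    AccountClient a = new AccountClient(); // account-level auth from the environment
    LlmProxyPartnerPoweredEnforceAPI api =
        new LlmProxyPartnerPoweredEnforceAPI(a.apiClient()); // apiClient() assumed

    // The three-argument overload builds the UpdateLlmProxyPartnerPoweredEnforceRequest
    // wrapper internally, which is the common path for one-shot updates.
    LlmProxyPartnerPoweredEnforce enforced =
        api.update(
            true, // allowMissing, always true for the Settings API
            new LlmProxyPartnerPoweredEnforce()
                .setBooleanVal(new BooleanMessage().setValue(true)), // assumed setter
            "boolean_val.value"); // hypothetical mask path
    System.out.println(enforced);
  }
}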
+ */ + @JsonProperty("setting_name") + private String settingName; + + public LlmProxyPartnerPoweredWorkspace setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public LlmProxyPartnerPoweredWorkspace setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public LlmProxyPartnerPoweredWorkspace setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LlmProxyPartnerPoweredWorkspace that = (LlmProxyPartnerPoweredWorkspace) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(LlmProxyPartnerPoweredWorkspace.class) + .add("booleanVal", booleanVal) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java new file mode 100755 index 000000000..fcb5dfbc0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceAPI.java @@ -0,0 +1,68 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Determines if partner powered models are enabled or not for a specific workspace */ +@Generated +public class LlmProxyPartnerPoweredWorkspaceAPI { + private static final Logger LOG = + LoggerFactory.getLogger(LlmProxyPartnerPoweredWorkspaceAPI.class); + + private final LlmProxyPartnerPoweredWorkspaceService impl; + + /** Regular-use constructor */ + public LlmProxyPartnerPoweredWorkspaceAPI(ApiClient apiClient) { + impl = new LlmProxyPartnerPoweredWorkspaceImpl(apiClient); + } + + /** Constructor for mocks */ + public LlmProxyPartnerPoweredWorkspaceAPI(LlmProxyPartnerPoweredWorkspaceService mock) { + impl = mock; + } + + /** + * Delete the enable partner powered AI features workspace setting. + * + *
Reverts the enable partner powered AI features workspace setting to its default value. + */ + public DeleteLlmProxyPartnerPoweredWorkspaceResponse delete( + DeleteLlmProxyPartnerPoweredWorkspaceRequest request) { + return impl.delete(request); + } + + /** + * Get the enable partner powered AI features workspace setting. + * + *
Gets the enable partner powered AI features workspace setting. + */ + public LlmProxyPartnerPoweredWorkspace get(GetLlmProxyPartnerPoweredWorkspaceRequest request) { + return impl.get(request); + } + + public LlmProxyPartnerPoweredWorkspace update( + boolean allowMissing, LlmProxyPartnerPoweredWorkspace setting, String fieldMask) { + return update( + new UpdateLlmProxyPartnerPoweredWorkspaceRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the enable partner powered AI features workspace setting. + * + *
Updates the enable partner powered AI features workspace setting. + */ + public LlmProxyPartnerPoweredWorkspace update( + UpdateLlmProxyPartnerPoweredWorkspaceRequest request) { + return impl.update(request); + } + + public LlmProxyPartnerPoweredWorkspaceService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java new file mode 100755 index 000000000..7ebe4b415 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceImpl.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of LlmProxyPartnerPoweredWorkspace */ +@Generated +class LlmProxyPartnerPoweredWorkspaceImpl implements LlmProxyPartnerPoweredWorkspaceService { + private final ApiClient apiClient; + + public LlmProxyPartnerPoweredWorkspaceImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public DeleteLlmProxyPartnerPoweredWorkspaceResponse delete( + DeleteLlmProxyPartnerPoweredWorkspaceRequest request) { + String path = "/api/2.0/settings/types/llm_proxy_partner_powered/names/default"; + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DeleteLlmProxyPartnerPoweredWorkspaceResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public LlmProxyPartnerPoweredWorkspace get(GetLlmProxyPartnerPoweredWorkspaceRequest request) { + String path = "/api/2.0/settings/types/llm_proxy_partner_powered/names/default"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, LlmProxyPartnerPoweredWorkspace.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public LlmProxyPartnerPoweredWorkspace update( + UpdateLlmProxyPartnerPoweredWorkspaceRequest request) { + String path = "/api/2.0/settings/types/llm_proxy_partner_powered/names/default"; + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, LlmProxyPartnerPoweredWorkspace.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceService.java new file mode 100755 index 000000000..ee6083576 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/LlmProxyPartnerPoweredWorkspaceService.java @@ -0,0 +1,38 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Determines if partner powered models are enabled or not for a specific workspace + * + *
This is the high-level interface, which contains generated methods. + * + *
Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface LlmProxyPartnerPoweredWorkspaceService { + /** + * Delete the enable partner powered AI features workspace setting. + * + *
Reverts the enable partner powered AI features workspace setting to its default value. + */ + DeleteLlmProxyPartnerPoweredWorkspaceResponse delete( + DeleteLlmProxyPartnerPoweredWorkspaceRequest deleteLlmProxyPartnerPoweredWorkspaceRequest); + + /** + * Get the enable partner powered AI features workspace setting. + * + *
Gets the enable partner powered AI features workspace setting. + */ + LlmProxyPartnerPoweredWorkspace get( + GetLlmProxyPartnerPoweredWorkspaceRequest getLlmProxyPartnerPoweredWorkspaceRequest); + + /** + * Update the enable partner powered AI features workspace setting. + * + *
Updates the enable partner powered AI features workspace setting. + */ + LlmProxyPartnerPoweredWorkspace update( + UpdateLlmProxyPartnerPoweredWorkspaceRequest updateLlmProxyPartnerPoweredWorkspaceRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java index 81648bdf3..b0fba4f64 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java @@ -35,6 +35,8 @@ public class SettingsAPI { private EnhancedSecurityMonitoringAPI enhancedSecurityMonitoringAPI; + private LlmProxyPartnerPoweredWorkspaceAPI llmProxyPartnerPoweredWorkspaceAPI; + private RestrictWorkspaceAdminsAPI restrictWorkspaceAdminsAPI; /** Regular-use constructor */ @@ -64,6 +66,8 @@ public SettingsAPI(ApiClient apiClient) { enhancedSecurityMonitoringAPI = new EnhancedSecurityMonitoringAPI(apiClient); + llmProxyPartnerPoweredWorkspaceAPI = new LlmProxyPartnerPoweredWorkspaceAPI(apiClient); + restrictWorkspaceAdminsAPI = new RestrictWorkspaceAdminsAPI(apiClient); } @@ -136,6 +140,11 @@ public EnhancedSecurityMonitoringAPI EnhancedSecurityMonitoring() { return enhancedSecurityMonitoringAPI; } + /** Determines if partner powered models are enabled or not for a specific workspace. */ + public LlmProxyPartnerPoweredWorkspaceAPI LlmProxyPartnerPoweredWorkspace() { + return llmProxyPartnerPoweredWorkspaceAPI; + } + /** * The Restrict Workspace Admins setting lets you control the capabilities of workspace admins. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java new file mode 100755 index 000000000..3d8db559a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +public class UpdateLlmProxyPartnerPoweredAccountRequest { + /** This should always be set to true for Settings API. Added for AIP compliance. */ + @JsonProperty("allow_missing") + private Boolean allowMissing; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *
A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonProperty("field_mask") + private String fieldMask; + + /** */ + @JsonProperty("setting") + private LlmProxyPartnerPoweredAccount setting; + + public UpdateLlmProxyPartnerPoweredAccountRequest setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateLlmProxyPartnerPoweredAccountRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateLlmProxyPartnerPoweredAccountRequest setSetting( + LlmProxyPartnerPoweredAccount setting) { + this.setting = setting; + return this; + } + + public LlmProxyPartnerPoweredAccount getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateLlmProxyPartnerPoweredAccountRequest that = + (UpdateLlmProxyPartnerPoweredAccountRequest) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateLlmProxyPartnerPoweredAccountRequest.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java new file mode 100755 index 000000000..c644fc32c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +public class UpdateLlmProxyPartnerPoweredEnforceRequest { + /** This should always be set to true for Settings API. Added for AIP compliance. */ + @JsonProperty("allow_missing") + private Boolean allowMissing; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *
A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonProperty("field_mask") + private String fieldMask; + + /** */ + @JsonProperty("setting") + private LlmProxyPartnerPoweredEnforce setting; + + public UpdateLlmProxyPartnerPoweredEnforceRequest setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateLlmProxyPartnerPoweredEnforceRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateLlmProxyPartnerPoweredEnforceRequest setSetting( + LlmProxyPartnerPoweredEnforce setting) { + this.setting = setting; + return this; + } + + public LlmProxyPartnerPoweredEnforce getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateLlmProxyPartnerPoweredEnforceRequest that = + (UpdateLlmProxyPartnerPoweredEnforceRequest) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateLlmProxyPartnerPoweredEnforceRequest.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java new file mode 100755 index 000000000..52f1bb85a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +public class UpdateLlmProxyPartnerPoweredWorkspaceRequest { + /** This should always be set to true for Settings API. Added for AIP compliance. */ + @JsonProperty("allow_missing") + private Boolean allowMissing; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *
A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonProperty("field_mask") + private String fieldMask; + + /** */ + @JsonProperty("setting") + private LlmProxyPartnerPoweredWorkspace setting; + + public UpdateLlmProxyPartnerPoweredWorkspaceRequest setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateLlmProxyPartnerPoweredWorkspaceRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateLlmProxyPartnerPoweredWorkspaceRequest setSetting( + LlmProxyPartnerPoweredWorkspace setting) { + this.setting = setting; + return this; + } + + public LlmProxyPartnerPoweredWorkspace getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateLlmProxyPartnerPoweredWorkspaceRequest that = + (UpdateLlmProxyPartnerPoweredWorkspaceRequest) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateLlmProxyPartnerPoweredWorkspaceRequest.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java index 3d85aeec4..756895733 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java @@ -24,6 +24,10 @@ public AlertsV2API(AlertsV2Service mock) { impl = mock; } + public AlertV2 createAlert(AlertV2 alert) { + return createAlert(new CreateAlertV2Request().setAlert(alert)); + } + /** * Create an alert. 
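The new createAlert(AlertV2) overload removes the request-wrapper boilerplate for callers. A hedged sketch, assuming WorkspaceClient exposes the AlertsV2 service as alertsV2() and that AlertV2 carries the illustrative setters used here:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sql.AlertV2;

public class CreateAlertExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // workspace auth from the environment

    // The new overload accepts the AlertV2 body directly; the CreateAlertV2Request
    // wrapper is built internally and AlertsV2Impl now POSTs only the alert payload.
    AlertV2 alert =
        new AlertV2()
            .setDisplayName("Daily error-rate alert") // illustrative fields; check the
            .setQueryText("SELECT count(*) FROM errors") // real AlertV2 schema for the
            .setWarehouseId("<warehouse-id>"); // supported setters
    AlertV2 created = w.alertsV2().createAlert(alert);
    System.out.println("created alert: " + created.getId()); // getId() assumed
  }
}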
* @@ -80,8 +84,9 @@ public void trashAlert(TrashAlertV2Request request) { impl.trashAlert(request); } - public AlertV2 updateAlert(String id, String updateMask) { - return updateAlert(new UpdateAlertV2Request().setId(id).setUpdateMask(updateMask)); + public AlertV2 updateAlert(String id, AlertV2 alert, String updateMask) { + return updateAlert( + new UpdateAlertV2Request().setId(id).setAlert(alert).setUpdateMask(updateMask)); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java index 42c268b80..b8379503e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java @@ -20,7 +20,7 @@ public AlertsV2Impl(ApiClient apiClient) { public AlertV2 createAlert(CreateAlertV2Request request) { String path = "/api/2.0/alerts"; try { - Request req = new Request("POST", path, apiClient.serialize(request)); + Request req = new Request("POST", path, apiClient.serialize(request.getAlert())); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); @@ -73,7 +73,7 @@ public void trashAlert(TrashAlertV2Request request) { public AlertV2 updateAlert(UpdateAlertV2Request request) { String path = String.format("/api/2.0/alerts/%s", request.getId()); try { - Request req = new Request("PATCH", path, apiClient.serialize(request)); + Request req = new Request("PATCH", path, apiClient.serialize(request.getAlert())); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java index e9dce84d6..10b1698ab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Create an alert */ @Generated public class CreateAlertV2Request { /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java index a1df8b791..1e43290d8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java @@ -3,11 +3,13 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Update an alert */ @Generated public class UpdateAlertV2Request { /** */ @@ -28,7 +30,8 @@ public class UpdateAlertV2Request { * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if * the API changes in the future. 
*/ - @JsonProperty("update_mask") + @JsonIgnore + @QueryParam("update_mask") private String updateMask; public UpdateAlertV2Request setAlert(AlertV2 alert) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java index a7dd16a46..f26fda411 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java @@ -8,13 +8,6 @@ import java.util.Collection; import java.util.Objects; -/** - * copied from proto3 / Google Well Known Types, source: - * https://github.com/protocolbuffers/protobuf/blob/450d24ca820750c5db5112a6f0b0c2efb9758021/src/google/protobuf/struct.proto - * `ListValue` is a wrapper around a repeated field of values. - * - *
The JSON representation for `ListValue` is JSON array. - */ @Generated public class ListValue { /** Repeated field of dynamically typed values. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java index c7bda63c5..7e8e17153 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java @@ -8,16 +8,6 @@ import java.util.Collection; import java.util.Objects; -/** - * copied from proto3 / Google Well Known Types, source: - * https://github.com/protocolbuffers/protobuf/blob/450d24ca820750c5db5112a6f0b0c2efb9758021/src/google/protobuf/struct.proto - * `Struct` represents a structured data value, consisting of fields which map to dynamically typed - * values. In some languages, `Struct` might be supported by a native representation. For example, - * in scripting languages like JS a struct is represented as an object. The details of that - * representation are described together with the proto support for the language. - * - *
The JSON representation for `Struct` is JSON object. - */ @Generated public class Struct { /** Data entry, corresponding to a row in a vector index. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java index 2ab3764de..2c50b490a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Value.java @@ -13,13 +13,7 @@ public class Value { @JsonProperty("bool_value") private Boolean boolValue; - /** - * copied from proto3 / Google Well Known Types, source: - * https://github.com/protocolbuffers/protobuf/blob/450d24ca820750c5db5112a6f0b0c2efb9758021/src/google/protobuf/struct.proto - * `ListValue` is a wrapper around a repeated field of values. - * - *
The JSON representation for `ListValue` is JSON array. - */ + /** */ @JsonProperty("list_value") private ListValue listValue; @@ -31,16 +25,7 @@ public class Value { @JsonProperty("string_value") private String stringValue; - /** - * copied from proto3 / Google Well Known Types, source: - * https://github.com/protocolbuffers/protobuf/blob/450d24ca820750c5db5112a6f0b0c2efb9758021/src/google/protobuf/struct.proto - * `Struct` represents a structured data value, consisting of fields which map to dynamically - * typed values. In some languages, `Struct` might be supported by a native representation. For - * example, in scripting languages like JS a struct is represented as an object. The details of - * that representation are described together with the proto support for the language. - * - *
The JSON representation for `Struct` is JSON object. - */ + /** */ @JsonProperty("struct_value") private Struct structValue;
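Tying the settings changes above together, here is a minimal workspace-level round trip through the new SettingsAPI accessor. The accessor, the three-argument update overload, and the delete method are taken from the signatures in this diff; BooleanMessage.setValue and the boolean_val.value mask path are assumptions.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settings.BooleanMessage;
import com.databricks.sdk.service.settings.DeleteLlmProxyPartnerPoweredWorkspaceRequest;
import com.databricks.sdk.service.settings.GetLlmProxyPartnerPoweredWorkspaceRequest;
import com.databricks.sdk.service.settings.LlmProxyPartnerPoweredWorkspace;

public class LlmProxyWorkspaceSettingExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // workspace auth from the environment

    // Read the current value through the new SettingsAPI accessor.
    LlmProxyPartnerPoweredWorkspace current =
        w.settings()
            .LlmProxyPartnerPoweredWorkspace()
            .get(new GetLlmProxyPartnerPoweredWorkspaceRequest());

    // Flip the flag; the etag from the GET rides along on the setting object.
    current.setBooleanVal(new BooleanMessage().setValue(false)); // assumed setter
    w.settings().LlmProxyPartnerPoweredWorkspace().update(true, current, "boolean_val.value");

    // Or drop the workspace override and revert to the account-level default.
    w.settings()
        .LlmProxyPartnerPoweredWorkspace()
        .delete(new DeleteLlmProxyPartnerPoweredWorkspaceRequest());
  }
}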