From 88e6e4f7f35968c36509283b8a4560f850c13a47 Mon Sep 17 00:00:00 2001 From: Parth Bansal Date: Tue, 11 Mar 2025 14:39:56 +0000 Subject: [PATCH] update --- .codegen/_openapi_sha | 2 +- .gitattributes | 50 ++- .../com/databricks/sdk/WorkspaceClient.java | 20 ++ .../databricks/sdk/core/DatabricksConfig.java | 8 +- .../com/databricks/sdk/service/apps/App.java | 32 ++ .../billing/CreateBudgetPolicyRequest.java | 41 +-- .../sdk/service/catalog/ConnectionType.java | 2 + .../sdk/service/catalog/TagKeyValue.java | 55 +++ .../catalog/ValidateCredentialRequest.java | 17 + .../CleanRoomAssetTableLocalDetails.java | 7 +- .../ListCleanRoomNotebookTaskRunsRequest.java | 2 +- .../service/compute/ClusterAttributes.java | 23 +- .../sdk/service/compute/ClusterDetails.java | 23 +- .../sdk/service/compute/ClusterSpec.java | 23 +- .../sdk/service/compute/CreateCluster.java | 23 +- .../sdk/service/compute/DataSecurityMode.java | 8 +- .../sdk/service/compute/DiskSpec.java | 5 + .../sdk/service/compute/DiskType.java | 11 +- .../compute/DiskTypeAzureDiskVolumeType.java | 4 + .../compute/DiskTypeEbsVolumeType.java | 4 + .../sdk/service/compute/DockerImage.java | 2 +- .../sdk/service/compute/EditCluster.java | 23 +- .../sdk/service/compute/GetInstancePool.java | 2 +- .../compute/GlobalInitScriptsImpl.java | 2 + .../service/compute/InstancePoolAndStats.java | 2 +- .../compute/InstancePoolAwsAttributes.java | 10 +- ...InstancePoolAwsAttributesAvailability.java | 6 +- .../compute/InstancePoolAzureAttributes.java | 13 +- ...stancePoolAzureAttributesAvailability.java | 6 +- .../compute/InstancePoolGcpAttributes.java | 1 + .../service/compute/InstancePoolState.java | 7 +- .../databricks/sdk/service/compute/Kind.java | 11 +- .../service/compute/PendingInstanceError.java | 1 + .../compute/UpdateClusterResource.java | 23 +- .../sdk/service/dashboards/DataType.java | 26 -- .../sdk/service/dashboards/GenieAPI.java | 69 +++- .../service/dashboards/GenieAttachment.java | 35 +- 
.../service/dashboards/GenieConversation.java | 22 +- ...eExecuteMessageAttachmentQueryRequest.java | 86 +++++ .../GenieExecuteMessageQueryRequest.java | 2 +- ...etMessageAttachmentQueryResultRequest.java | 87 +++++ .../GenieGetMessageQueryResultRequest.java | 2 +- ...enieGetQueryResultByAttachmentRequest.java | 2 +- .../dashboards/GenieGetSpaceRequest.java | 42 +++ .../sdk/service/dashboards/GenieImpl.java | 53 +++ .../sdk/service/dashboards/GenieMessage.java | 50 ++- ...achment.java => GenieQueryAttachment.java} | 98 ++--- .../dashboards/GenieResultMetadata.java | 58 +++ .../sdk/service/dashboards/GenieService.java | 39 +- .../sdk/service/dashboards/GenieSpace.java | 74 ++++ .../service/dashboards/MessageErrorType.java | 3 + .../sdk/service/dashboards/MessageStatus.java | 46 +-- .../sdk/service/dashboards/QuerySchema.java | 62 ---- .../service/dashboards/QuerySchemaColumn.java | 74 ---- .../sdk/service/files/DownloadResponse.java | 4 +- .../service/files/GetMetadataResponse.java | 4 +- .../databricks/sdk/service/jobs/BaseRun.java | 5 +- .../sdk/service/jobs/ComputeConfig.java | 75 ++++ .../sdk/service/jobs/GenAiComputeTask.java | 175 +++++++++ .../databricks/sdk/service/jobs/JobsImpl.java | 38 +- .../com/databricks/sdk/service/jobs/Run.java | 5 +- .../jobs/RunLifecycleStateV2State.java | 1 + .../databricks/sdk/service/jobs/RunNow.java | 4 +- .../databricks/sdk/service/jobs/RunTask.java | 21 +- .../sdk/service/jobs/SubmitTask.java | 16 + .../com/databricks/sdk/service/jobs/Task.java | 16 + .../sdk/service/marketplace/AssetType.java | 1 + .../CreateForecastingExperimentRequest.java | 336 ++++++++++++++++++ .../CreateForecastingExperimentResponse.java | 44 +++ .../databricks/sdk/service/ml/CreateRun.java | 17 +- .../databricks/sdk/service/ml/Dataset.java | 12 +- .../sdk/service/ml/DatasetInput.java | 1 + .../databricks/sdk/service/ml/Experiment.java | 1 + .../sdk/service/ml/ExperimentTag.java | 1 + .../sdk/service/ml/ExperimentsAPI.java | 70 ++-- 
.../sdk/service/ml/ExperimentsImpl.java | 4 +- .../sdk/service/ml/ExperimentsService.java | 64 ++-- .../databricks/sdk/service/ml/FileInfo.java | 1 + .../sdk/service/ml/ForecastingAPI.java | 126 +++++++ .../sdk/service/ml/ForecastingExperiment.java | 75 ++++ .../ml/ForecastingExperimentState.java | 14 + .../sdk/service/ml/ForecastingImpl.java | 47 +++ .../sdk/service/ml/ForecastingService.java | 30 ++ .../sdk/service/ml/GetByNameRequest.java | 2 +- .../ml/GetExperimentByNameResponse.java | 44 +++ .../ml/GetForecastingExperimentRequest.java | 44 +++ .../sdk/service/ml/GetHistoryRequest.java | 4 +- .../service/ml/GetMetricHistoryResponse.java | 11 +- .../sdk/service/ml/GetRunRequest.java | 4 +- .../databricks/sdk/service/ml/InputTag.java | 1 + .../sdk/service/ml/ListArtifactsRequest.java | 4 +- .../service/ml/ListExperimentsRequest.java | 6 +- .../databricks/sdk/service/ml/LogMetric.java | 2 +- .../databricks/sdk/service/ml/LogParam.java | 4 +- .../com/databricks/sdk/service/ml/Metric.java | 1 + .../com/databricks/sdk/service/ml/Param.java | 1 + .../com/databricks/sdk/service/ml/Run.java | 1 + .../databricks/sdk/service/ml/RunData.java | 1 + .../databricks/sdk/service/ml/RunInfo.java | 21 +- .../sdk/service/ml/RunInfoStatus.java | 2 +- .../databricks/sdk/service/ml/RunInputs.java | 1 + .../com/databricks/sdk/service/ml/RunTag.java | 1 + .../sdk/service/ml/SearchExperiments.java | 6 +- .../service/ml/SearchExperimentsViewType.java | 15 - .../databricks/sdk/service/ml/SearchRuns.java | 14 +- .../sdk/service/ml/SetExperimentTag.java | 10 +- .../com/databricks/sdk/service/ml/SetTag.java | 12 +- .../databricks/sdk/service/ml/UpdateRun.java | 19 +- .../sdk/service/ml/UpdateRunStatus.java | 2 +- ...archRunsRunViewType.java => ViewType.java} | 4 +- .../CreateServicePrincipalSecretRequest.java | 24 +- .../CreateServicePrincipalSecretResponse.java | 19 +- .../service/oauth2/OidcFederationPolicy.java | 9 +- .../sdk/service/oauth2/SecretInfo.java | 19 +- 
.../oauth2/ServicePrincipalSecretsImpl.java | 3 +- .../service/serving/AmazonBedrockConfig.java | 25 +- .../serving/CreateServingEndpoint.java | 17 +- .../sdk/service/serving/ServingEndpoint.java | 16 + .../serving/ServingEndpointDetailed.java | 16 + .../service/serving/ServingEndpointsImpl.java | 1 - .../settings/AccountIpAccessListsImpl.java | 3 - .../service/settings/IpAccessListsImpl.java | 3 - .../service/settings/TokenManagementImpl.java | 1 - .../service/sharing/AuthenticationType.java | 1 + .../sdk/service/sharing/ColumnTypeName.java | 35 ++ .../sharing/DeltaSharingDependency.java | 59 +++ .../sharing/DeltaSharingDependencyList.java | 47 +++ .../DeltaSharingFunctionDependency.java | 60 ++++ .../sharing/DeltaSharingTableDependency.java | 59 +++ .../sdk/service/sharing/Function.java | 286 +++++++++++++++ .../sharing/FunctionParameterInfo.java | 224 ++++++++++++ .../sharing/FunctionParameterInfos.java | 43 +++ .../sharing/FunctionParameterMode.java | 12 + .../sharing/FunctionParameterType.java | 11 + .../sharing/GetSharePermissionsResponse.java | 64 ++++ .../ListProviderShareAssetsRequest.java | 129 +++++++ .../ListProviderShareAssetsResponse.java | 91 +++++ .../sdk/service/sharing/NotebookFile.java | 120 +++++++ .../PartitionSpecificationPartition.java | 43 --- .../service/sharing/PermissionsChange.java | 75 ++++ .../sdk/service/sharing/ProvidersAPI.java | 17 + .../sdk/service/sharing/ProvidersImpl.java | 18 +- .../sdk/service/sharing/ProvidersService.java | 9 + .../sdk/service/sharing/RecipientsImpl.java | 1 - .../service/sharing/RegisteredModelAlias.java | 58 +++ .../sdk/service/sharing/SharedDataObject.java | 15 +- .../SharedDataObjectDataObjectType.java | 1 - ...redDataObjectHistoryDataSharingStatus.java | 4 - .../sharing/SharedDataObjectStatus.java | 1 - .../sharing/SharedDataObjectUpdateAction.java | 1 - .../service/sharing/SharedSecurableKind.java | 13 + .../sdk/service/sharing/SharesAPI.java | 17 +- .../sdk/service/sharing/SharesImpl.java | 10 +- 
.../sdk/service/sharing/SharesService.java | 9 +- .../databricks/sdk/service/sharing/Table.java | 166 +++++++++ .../sharing/TableInternalAttributes.java | 97 +++++ ...ableInternalAttributesSharedTableType.java | 15 + .../sharing/UpdatePermissionsResponse.java | 28 -- .../sharing/UpdateSharePermissions.java | 53 +-- .../UpdateSharePermissionsResponse.java | 46 +++ .../sdk/service/sharing/Volume.java | 153 ++++++++ .../sharing/VolumeInternalAttributes.java | 60 ++++ .../vectorsearch/QueryVectorIndexRequest.java | 16 + .../QueryVectorIndexResponse.java | 3 +- .../sdk/service/workspace/ExportFormat.java | 2 + .../sdk/service/workspace/ExportResponse.java | 4 + .../sdk/service/workspace/ImportFormat.java | 26 +- .../sdk/service/workspace/Language.java | 2 +- .../sdk/service/workspace/ObjectInfo.java | 5 +- .../sdk/service/workspace/ObjectType.java | 20 +- .../sdk/service/jobs/JobsImplTest.java | 165 --------- tagging.py | 7 +- 172 files changed, 4430 insertions(+), 971 deletions(-) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValue.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DataType.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java rename databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/{QueryAttachment.java => GenieQueryAttachment.java} (51%) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java delete mode 100755 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchema.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchemaColumn.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperiment.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperimentState.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingService.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequest.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsViewType.java rename databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/{SearchRunsRunViewType.java => ViewType.java} (62%) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java create mode 100755 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependency.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependency.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfo.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfos.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterMode.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterType.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFile.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionSpecificationPartition.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAlias.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedSecurableKind.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java create mode 100755 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesSharedTableType.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdatePermissionsResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Volume.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributes.java delete mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/service/jobs/JobsImplTest.java diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 562b72fcc..a7b80d538 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -99f644e72261ef5ecf8d74db20f4b7a1e09723cc \ No newline at end of file +cd641c9dd4febe334b339dd7878d099dcf0eeab5 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index b90310c6d..b5d27343a 100755 --- a/.gitattributes +++ b/.gitattributes @@ -451,6 +451,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableType.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValue.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java linguist-generated=true @@ -796,7 +797,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSu databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CronSchedule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DashboardView.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DataType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true @@ -808,14 +808,20 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java linguist-generated=true @@ -847,13 +853,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQuer databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchema.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchemaColumn.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePauseStatus.java linguist-generated=true @@ -1048,6 +1051,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNote databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterInstance.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Condition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTaskOp.java linguist-generated=true @@ -1072,6 +1076,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTask.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStats.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskTaskRunStats.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Format.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionLevelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobPermissionsRequest.java linguist-generated=true @@ -1382,6 +1387,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperiment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionRequest.java linguist-generated=true @@ -1430,12 +1437,19 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperiment.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperimentState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionsRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponse.java linguist-generated=true @@ -1517,14 +1531,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java l databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsViewType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsResponse.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsRunViewType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagRequest.java linguist-generated=true @@ -1554,6 +1566,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRun.java l databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ViewType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyService.java linguist-generated=true @@ -2126,6 +2139,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceC databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/AuthenticationType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java linguist-generated=true @@ -2133,13 +2147,25 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProvi databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependency.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependency.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfos.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterMode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequest.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientSharePermissionsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetShareRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/IpAccessList.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderSharesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersResponse.java linguist-generated=true @@ -2147,10 +2173,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipie databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipientsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesResponse.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFile.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Partition.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionSpecificationPartition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValue.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValueOp.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Privilege.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java linguist-generated=true @@ -2167,6 +2194,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTo databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAlias.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RetrieveTokenResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientToken.java linguist-generated=true @@ -2180,14 +2208,20 @@ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataO databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdate.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdateAction.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedSecurableKind.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdatePermissionsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesSharedTableType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponse.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Volume.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributes.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertCondition.java linguist-generated=true diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 12452d424..547aa11f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -132,6 +132,8 @@ import com.databricks.sdk.service.marketplace.ProviderProvidersService; import com.databricks.sdk.service.ml.ExperimentsAPI; import com.databricks.sdk.service.ml.ExperimentsService; +import com.databricks.sdk.service.ml.ForecastingAPI; +import com.databricks.sdk.service.ml.ForecastingService; import com.databricks.sdk.service.ml.ModelRegistryAPI; import com.databricks.sdk.service.ml.ModelRegistryService; import com.databricks.sdk.service.pipelines.PipelinesAPI; @@ -304,6 +306,7 @@ public class WorkspaceClient { private WorkspaceAPI workspaceAPI; private WorkspaceBindingsAPI workspaceBindingsAPI; private WorkspaceConfAPI workspaceConfAPI; + private ForecastingAPI forecastingAPI; public WorkspaceClient() { this(ConfigLoader.getDefault()); @@ -410,6 +413,7 @@ public WorkspaceClient(DatabricksConfig config) { workspaceAPI = new WorkspaceAPI(apiClient); workspaceBindingsAPI = new WorkspaceBindingsAPI(apiClient); workspaceConfAPI = new WorkspaceConfAPI(apiClient); + forecastingAPI = new ForecastingAPI(apiClient); } /** Constructor for mocks */ @@ -1727,6 +1731,11 
@@ public WorkspaceConfAPI workspaceConf() { return workspaceConfAPI; } + /** The Forecasting API allows you to create and get serverless forecasting experiments */ + public ForecastingAPI forecasting() { + return forecastingAPI; + } + /** Replace the default AccessControlService with a custom implementation. */ public WorkspaceClient withAccessControlImpl(AccessControlService accessControl) { return this.withAccessControlAPI(new AccessControlAPI(accessControl)); @@ -2835,6 +2844,17 @@ public WorkspaceClient withWorkspaceConfAPI(WorkspaceConfAPI workspaceConf) { return this; } + /** Replace the default ForecastingService with a custom implementation. */ + public WorkspaceClient withForecastingImpl(ForecastingService forecasting) { + return this.withForecastingAPI(new ForecastingAPI(forecasting)); + } + + /** Replace the default ForecastingAPI with a custom implementation. */ + public WorkspaceClient withForecastingAPI(ForecastingAPI forecasting) { + this.forecastingAPI = forecasting; + return this; + } + public ApiClient apiClient() { return apiClient; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java index 2943ce82e..a6ae5e4ac 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java @@ -374,13 +374,17 @@ public DatabricksConfig setAzureUseMsi(boolean azureUseMsi) { return this; } - /** @deprecated Use {@link #getAzureUseMsi()} instead. */ + /** + * @deprecated Use {@link #getAzureUseMsi()} instead. + */ @Deprecated() public boolean getAzureUseMSI() { return azureUseMsi; } - /** @deprecated Use {@link #setAzureUseMsi(boolean)} instead. */ + /** + * @deprecated Use {@link #setAzureUseMsi(boolean)} instead. 
+ */ @Deprecated public DatabricksConfig setAzureUseMSI(boolean azureUseMsi) { this.azureUseMsi = azureUseMsi; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java index 6fe31a5b2..0b0d38da1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java @@ -21,6 +21,10 @@ public class App { @JsonProperty("app_status") private ApplicationStatus appStatus; + /** */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + /** */ @JsonProperty("compute_status") private ComputeStatus computeStatus; @@ -44,6 +48,10 @@ public class App { @JsonProperty("description") private String description; + /** */ + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + /** The unique identifier of the app. */ @JsonProperty("id") private String id; @@ -108,6 +116,15 @@ public ApplicationStatus getAppStatus() { return appStatus; } + public App setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + public App setComputeStatus(ComputeStatus computeStatus) { this.computeStatus = computeStatus; return this; @@ -153,6 +170,15 @@ public String getDescription() { return description; } + public App setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + public App setId(String id) { this.id = id; return this; @@ -250,11 +276,13 @@ public boolean equals(Object o) { App that = (App) o; return Objects.equals(activeDeployment, that.activeDeployment) && Objects.equals(appStatus, that.appStatus) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) && 
Objects.equals(computeStatus, that.computeStatus) && Objects.equals(createTime, that.createTime) && Objects.equals(creator, that.creator) && Objects.equals(defaultSourceCodePath, that.defaultSourceCodePath) && Objects.equals(description, that.description) + && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) && Objects.equals(id, that.id) && Objects.equals(name, that.name) && Objects.equals(pendingDeployment, that.pendingDeployment) @@ -272,11 +300,13 @@ public int hashCode() { return Objects.hash( activeDeployment, appStatus, + budgetPolicyId, computeStatus, createTime, creator, defaultSourceCodePath, description, + effectiveBudgetPolicyId, id, name, pendingDeployment, @@ -294,11 +324,13 @@ public String toString() { return new ToStringer(App.class) .add("activeDeployment", activeDeployment) .add("appStatus", appStatus) + .add("budgetPolicyId", budgetPolicyId) .add("computeStatus", computeStatus) .add("createTime", createTime) .add("creator", creator) .add("defaultSourceCodePath", defaultSourceCodePath) .add("description", description) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) .add("id", id) .add("name", name) .add("pendingDeployment", pendingDeployment) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java index 8e399ffa3..2ed87cd0e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java @@ -5,22 +5,18 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; import java.util.Objects; /** A request to create a BudgetPolicy. 
*/ @Generated public class CreateBudgetPolicyRequest { - /** A list of tags defined by the customer. At most 40 entries are allowed per policy. */ - @JsonProperty("custom_tags") - private Collection customTags; - /** - * The name of the policy. - Must be unique among active policies. - Can contain only characters - * of 0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace. + * The policy to create. `policy_id` needs to be empty as it will be generated `policy_name` must + * be provided, custom_tags may need to be provided depending on the cloud provider. All other + * fields are optional. */ - @JsonProperty("policy_name") - private String policyName; + @JsonProperty("policy") + private BudgetPolicy policy; /** * A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is @@ -29,23 +25,13 @@ public class CreateBudgetPolicyRequest { @JsonProperty("request_id") private String requestId; - public CreateBudgetPolicyRequest setCustomTags( - Collection customTags) { - this.customTags = customTags; - return this; - } - - public Collection getCustomTags() { - return customTags; - } - - public CreateBudgetPolicyRequest setPolicyName(String policyName) { - this.policyName = policyName; + public CreateBudgetPolicyRequest setPolicy(BudgetPolicy policy) { + this.policy = policy; return this; } - public String getPolicyName() { - return policyName; + public BudgetPolicy getPolicy() { + return policy; } public CreateBudgetPolicyRequest setRequestId(String requestId) { @@ -62,21 +48,18 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateBudgetPolicyRequest that = (CreateBudgetPolicyRequest) o; - return Objects.equals(customTags, that.customTags) - && Objects.equals(policyName, that.policyName) - && Objects.equals(requestId, that.requestId); + return Objects.equals(policy, that.policy) && Objects.equals(requestId, that.requestId); } @Override public int hashCode() { - return 
Objects.hash(customTags, policyName, requestId); + return Objects.hash(policy, requestId); } @Override public String toString() { return new ToStringer(CreateBudgetPolicyRequest.class) - .add("customTags", customTags) - .add("policyName", policyName) + .add("policy", policy) .add("requestId", requestId) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java index 02d89b8ab..b6e6a3e33 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java @@ -13,9 +13,11 @@ public enum ConnectionType { HIVE_METASTORE, HTTP, MYSQL, + ORACLE, POSTGRESQL, REDSHIFT, SNOWFLAKE, SQLDW, SQLSERVER, + TERADATA, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValue.java new file mode 100755 index 000000000..3bc328e51 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TagKeyValue.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class TagKeyValue { + /** name of the tag */ + @JsonProperty("key") + private String key; + + /** value of the tag associated with the key, could be optional */ + @JsonProperty("value") + private String value; + + public TagKeyValue setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public TagKeyValue setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TagKeyValue that = (TagKeyValue) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(TagKeyValue.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java index 00cb5fe0b..ff6867b05 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java @@ -21,6 +21,10 @@ public class ValidateCredentialRequest { @JsonProperty("credential_name") private String credentialName; + /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. 
*/ + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccount databricksGcpServiceAccount; + /** * The name of an existing external location to validate. Only applicable for storage credentials * (purpose is **STORAGE**.) @@ -71,6 +75,16 @@ public String getCredentialName() { return credentialName; } + public ValidateCredentialRequest setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccount databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccount getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + public ValidateCredentialRequest setExternalLocationName(String externalLocationName) { this.externalLocationName = externalLocationName; return this; @@ -115,6 +129,7 @@ public boolean equals(Object o) { return Objects.equals(awsIamRole, that.awsIamRole) && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) && Objects.equals(credentialName, that.credentialName) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) && Objects.equals(externalLocationName, that.externalLocationName) && Objects.equals(purpose, that.purpose) && Objects.equals(readOnly, that.readOnly) @@ -127,6 +142,7 @@ public int hashCode() { awsIamRole, azureManagedIdentity, credentialName, + databricksGcpServiceAccount, externalLocationName, purpose, readOnly, @@ -139,6 +155,7 @@ public String toString() { .add("awsIamRole", awsIamRole) .add("azureManagedIdentity", azureManagedIdentity) .add("credentialName", credentialName) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) .add("externalLocationName", externalLocationName) .add("purpose", purpose) .add("readOnly", readOnly) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetails.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetails.java index 775e0def6..308f3b99c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetails.java @@ -19,7 +19,7 @@ public class CleanRoomAssetTableLocalDetails { /** Partition filtering specification for a shared table. */ @JsonProperty("partitions") - private Collection partitions; + private Collection partitions; public CleanRoomAssetTableLocalDetails setLocalName(String localName) { this.localName = localName; @@ -31,13 +31,12 @@ public String getLocalName() { } public CleanRoomAssetTableLocalDetails setPartitions( - Collection partitions) { + Collection partitions) { this.partitions = partitions; return this; } - public Collection - getPartitions() { + public Collection getPartitions() { return partitions; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java index 5aaba13e8..5b64fdb82 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java @@ -19,7 +19,7 @@ public class ListCleanRoomNotebookTaskRunsRequest { @QueryParam("notebook_name") private String notebookName; - /** The maximum number of task runs to return */ + /** The maximum number of task runs to return. Currently ignored - all runs will be returned. 
*/ @JsonIgnore @QueryParam("page_size") private Long pageSize; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java index db0bdc978..5632c4bdb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java @@ -67,10 +67,10 @@ public class ClusterAttributes { * Data security mode decides what data governance model to use when accessing data from a * cluster. * - *

The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks - * will choose the most appropriate access mode depending on your compute configuration. * - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - * Alias for `SINGLE_USER`. + *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * + * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on + * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * + * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. * *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for * multiple users sharing the cluster. Data governance features are not available in this mode. * @@ -143,7 +143,7 @@ public class ClusterAttributes { private String instancePoolId; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` @@ -156,8 +156,15 @@ public class ClusterAttributes { * *

Depending on `kind`, different validations and default values will be applied. * - *

The first usage of this value is for the simple cluster form where it sets `kind = - * CLASSIC_PREVIEW`. + *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with + * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * + * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * + * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to + * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` + * + *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. + * + *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ @JsonProperty("kind") private Kind kind; @@ -231,7 +238,7 @@ public class ClusterAttributes { private Collection sshPublicKeys; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java index e8b977c41..0311bee84 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java @@ -111,10 +111,10 @@ public class ClusterDetails { * Data security mode decides what data governance model to use when accessing data from a * cluster. * - *

The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks - * will choose the most appropriate access mode depending on your compute configuration. * - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - * Alias for `SINGLE_USER`. + *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * + * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on + * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * + * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. * *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for * multiple users sharing the cluster. Data governance features are not available in this mode. * @@ -214,7 +214,7 @@ public class ClusterDetails { private String instancePoolId; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` @@ -234,8 +234,15 @@ public class ClusterDetails { * *

Depending on `kind`, different validations and default values will be applied. * - *

The first usage of this value is for the simple cluster form where it sets `kind = - * CLASSIC_PREVIEW`. + *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with + * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * + * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * + * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to + * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` + * + *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. + * + *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ @JsonProperty("kind") private Kind kind; @@ -376,7 +383,7 @@ public class ClusterDetails { private TerminationReason terminationReason; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java index 840b766f0..c1b8ed876 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java @@ -81,10 +81,10 @@ public class ClusterSpec { * Data security mode decides what data governance model to use when accessing data from a * cluster. * - *

The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks - * will choose the most appropriate access mode depending on your compute configuration. * - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - * Alias for `SINGLE_USER`. + *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * + * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on + * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * + * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. * *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for * multiple users sharing the cluster. Data governance features are not available in this mode. * @@ -157,7 +157,7 @@ public class ClusterSpec { private String instancePoolId; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` @@ -170,8 +170,15 @@ public class ClusterSpec { * *

Depending on `kind`, different validations and default values will be applied. * - *

The first usage of this value is for the simple cluster form where it sets `kind = - * CLASSIC_PREVIEW`. + *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with + * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * + * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * + * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to + * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` + * + *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. + * + *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ @JsonProperty("kind") private Kind kind; @@ -258,7 +265,7 @@ public class ClusterSpec { private Collection sshPublicKeys; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java index 844954914..460119af2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java @@ -88,10 +88,10 @@ public class CreateCluster { * Data security mode decides what data governance model to use when accessing data from a * cluster. * - *

The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks - * will choose the most appropriate access mode depending on your compute configuration. * - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - * Alias for `SINGLE_USER`. + *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * + * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on + * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * + * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. * *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for * multiple users sharing the cluster. Data governance features are not available in this mode. * @@ -164,7 +164,7 @@ public class CreateCluster { private String instancePoolId; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` @@ -177,8 +177,15 @@ public class CreateCluster { * *

Depending on `kind`, different validations and default values will be applied. * - *

The first usage of this value is for the simple cluster form where it sets `kind = - * CLASSIC_PREVIEW`. + *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with + * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * + * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * + * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to + * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` + * + *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. + * + *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ @JsonProperty("kind") private Kind kind; @@ -265,7 +272,7 @@ public class CreateCluster { private Collection sshPublicKeys; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataSecurityMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataSecurityMode.java index 5de06979d..f1fbddf2a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataSecurityMode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataSecurityMode.java @@ -7,10 +7,10 @@ /** * Data security mode decides what data governance model to use when accessing data from a cluster. * - *

The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will - * choose the most appropriate access mode depending on your compute configuration. * - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - * Alias for `SINGLE_USER`. + *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * + * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on + * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * + * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. * *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for * multiple users sharing the cluster. Data governance features are not available in this mode. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpec.java index 0b6483d0d..e9dbb915e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpec.java @@ -7,6 +7,11 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * Describes the disks that are launched for each instance in the spark cluster. For example, if the + * cluster has 3 instances, each instance is configured to launch 2 disks, 100 GiB each, then + * Databricks will launch a total of 6 disks, 100 GiB each, for this cluster. + */ @Generated public class DiskSpec { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java index 2ded7257c..3e04994c6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java @@ -7,13 +7,20 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Describes the disk type. */ @Generated public class DiskType { - /** */ + /** + * All Azure Disk types that Databricks supports. See + * https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks + */ @JsonProperty("azure_disk_volume_type") private DiskTypeAzureDiskVolumeType azureDiskVolumeType; - /** */ + /** + * All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for + * details. 
+ */ @JsonProperty("ebs_volume_type") private DiskTypeEbsVolumeType ebsVolumeType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypeAzureDiskVolumeType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypeAzureDiskVolumeType.java index 5895888be..52c5e391b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypeAzureDiskVolumeType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypeAzureDiskVolumeType.java @@ -4,6 +4,10 @@ import com.databricks.sdk.support.Generated; +/** + * All Azure Disk types that Databricks supports. See + * https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks + */ @Generated public enum DiskTypeAzureDiskVolumeType { PREMIUM_LRS, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypeEbsVolumeType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypeEbsVolumeType.java index b480ff53a..8eebfb070 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypeEbsVolumeType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypeEbsVolumeType.java @@ -4,6 +4,10 @@ import com.databricks.sdk.support.Generated; +/** + * All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for + * details. 
+ */ @Generated public enum DiskTypeEbsVolumeType { GENERAL_PURPOSE_SSD, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImage.java index 6dc0a516e..8e435bd76 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImage.java @@ -9,7 +9,7 @@ @Generated public class DockerImage { - /** */ + /** Basic auth with username and password */ @JsonProperty("basic_auth") private DockerBasicAuth basicAuth; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java index 826ed2bf0..0183721b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java @@ -85,10 +85,10 @@ public class EditCluster { * Data security mode decides what data governance model to use when accessing data from a * cluster. * - *

The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks - * will choose the most appropriate access mode depending on your compute configuration. * - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - * Alias for `SINGLE_USER`. + *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * + * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on + * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * + * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. * *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for * multiple users sharing the cluster. Data governance features are not available in this mode. * @@ -161,7 +161,7 @@ public class EditCluster { private String instancePoolId; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` @@ -174,8 +174,15 @@ public class EditCluster { * *

Depending on `kind`, different validations and default values will be applied. * - *

The first usage of this value is for the simple cluster form where it sets `kind = - * CLASSIC_PREVIEW`. + *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with + * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * + * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * + * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to + * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` + * + *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. + * + *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ @JsonProperty("kind") private Kind kind; @@ -262,7 +269,7 @@ public class EditCluster { private Collection sshPublicKeys; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java index 01b2e412a..97feb90b6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java @@ -35,7 +35,7 @@ public class GetInstancePool { private Map customTags; /** - * Tags that are added by Databricks regardless of any `custom_tags`, including: + * Tags that are added by Databricks regardless of any ``custom_tags``, including: * *

- Vendor: Databricks * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java index 283ea3002..94689ee30 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java @@ -36,6 +36,7 @@ public void delete(DeleteGlobalInitScriptRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -73,6 +74,7 @@ public void update(GlobalInitScriptUpdateRequest request) { try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java index 22ff0c8a6..f2fd58676 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java @@ -35,7 +35,7 @@ public class InstancePoolAndStats { private Map customTags; /** - * Tags that are added by Databricks regardless of any `custom_tags`, including: + * Tags that are added by Databricks regardless of any ``custom_tags``, including: * *

- Vendor: Databricks * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java index 5858ba750..2520eca50 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java @@ -7,13 +7,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Attributes set during instance pool creation which are related to Amazon Web Services. */ @Generated public class InstancePoolAwsAttributes { - /** - * Availability type used for the spot nodes. - * - *

The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability - */ + /** Availability type used for the spot nodes. */ @JsonProperty("availability") private InstancePoolAwsAttributesAvailability availability; @@ -26,9 +23,6 @@ public class InstancePoolAwsAttributes { * instances are requested for this cluster, only spot instances whose bid price percentage * matches this field will be considered. Note that, for safety, we enforce this field to be no * more than 10000. - * - *

The default value and documentation here should be kept consistent with - * CommonConf.defaultSpotBidPricePercent and CommonConf.maxSpotBidPricePercent. */ @JsonProperty("spot_bid_price_percent") private Long spotBidPricePercent; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributesAvailability.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributesAvailability.java index 1666c294c..d7d7501dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributesAvailability.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributesAvailability.java @@ -4,11 +4,7 @@ import com.databricks.sdk.support.Generated; -/** - * Availability type used for the spot nodes. - * - *

The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability - */ +/** The set of AWS availability types supported when setting up nodes for a cluster. */ @Generated public enum InstancePoolAwsAttributesAvailability { ON_DEMAND, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributes.java index badfaaedb..da52c8f92 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributes.java @@ -7,19 +7,18 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Attributes set during instance pool creation which are related to Azure. */ @Generated public class InstancePoolAzureAttributes { - /** - * Shows the Availability type used for the spot nodes. - * - *

The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability - */ + /** Availability type used for the spot nodes. */ @JsonProperty("availability") private InstancePoolAzureAttributesAvailability availability; /** - * The default value and documentation here should be kept consistent with - * CommonConf.defaultSpotBidMaxPrice. + * With variable pricing, you have option to set a max price, in US dollars (USD) For example, the + * value 2 would be a max price of $2.00 USD per hour. If you set the max price to be -1, the VM + * won't be evicted based on price. The price for the VM will be the current price for spot or the + * price for a standard VM, which ever is less, as long as there is capacity and quota available. */ @JsonProperty("spot_bid_max_price") private Double spotBidMaxPrice; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributesAvailability.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributesAvailability.java index 338746acc..9dc88b420 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributesAvailability.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAzureAttributesAvailability.java @@ -4,11 +4,7 @@ import com.databricks.sdk.support.Generated; -/** - * Shows the Availability type used for the spot nodes. - * - *

The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability - */ +/** The set of Azure availability types supported when setting up nodes for a cluster. */ @Generated public enum InstancePoolAzureAttributesAvailability { ON_DEMAND_AZURE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java index b989f56de..a97e496ad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Attributes set during instance pool creation which are related to GCP. */ @Generated public class InstancePoolGcpAttributes { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolState.java index 074cbb0e4..b89994ca2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolState.java @@ -4,7 +4,12 @@ import com.databricks.sdk.support.Generated; -/** Current state of the instance pool. */ +/** + * The state of a Cluster. The current allowable state transitions are as follows: + * + *

- ``ACTIVE`` -> ``STOPPED`` - ``ACTIVE`` -> ``DELETED`` - ``STOPPED`` -> ``ACTIVE`` - + * ``STOPPED`` -> ``DELETED`` + */ @Generated public enum InstancePoolState { ACTIVE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Kind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Kind.java index 11c0ce083..9fc487dfa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Kind.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Kind.java @@ -9,8 +9,15 @@ * *

Depending on `kind`, different validations and default values will be applied. * - *

The first usage of this value is for the simple cluster form where it sets `kind = - * CLASSIC_PREVIEW`. + *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no + * specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * + * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * + * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to + * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` + * + *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. + * + *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ @Generated public enum Kind { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java index 9535156ad..7e21345dd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Error message of a failed pending instances */ @Generated public class PendingInstanceError { /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java index 340a9515a..6e9feab08 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java @@ -74,10 +74,10 @@ public class UpdateClusterResource { * Data security mode decides what data governance model to use when accessing data from a * cluster. * - *

The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks - * will choose the most appropriate access mode depending on your compute configuration. * - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - * Alias for `SINGLE_USER`. + *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * + * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on + * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * + * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. * *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for * multiple users sharing the cluster. Data governance features are not available in this mode. * @@ -150,7 +150,7 @@ public class UpdateClusterResource { private String instancePoolId; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

When set to true, Databricks will automatically set single node related `custom_tags`, * `spark_conf`, and `num_workers` @@ -163,8 +163,15 @@ public class UpdateClusterResource { * *

Depending on `kind`, different validations and default values will be applied. * - *

The first usage of this value is for the simple cluster form where it sets `kind = - * CLASSIC_PREVIEW`. + *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with + * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * + * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * + * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to + * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` + * + *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. + * + *

[simple form]: https://docs.databricks.com/compute/simple-form.html */ @JsonProperty("kind") private Kind kind; @@ -251,7 +258,7 @@ public class UpdateClusterResource { private Collection sshPublicKeys; /** - * This field can only be used with `kind`. + * This field can only be used when `kind = CLASSIC_PREVIEW`. * *

`effective_spark_version` is determined by `spark_version` (DBR release), this field * `use_ml_runtime`, and whether `node_type_id` is gpu node or not. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DataType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DataType.java deleted file mode 100755 index 4c6bf3c0d..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DataType.java +++ /dev/null @@ -1,26 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; - -@Generated -public enum DataType { - DATA_TYPE_ARRAY, - DATA_TYPE_BIG_INT, - DATA_TYPE_BINARY, - DATA_TYPE_BOOLEAN, - DATA_TYPE_DATE, - DATA_TYPE_DECIMAL, - DATA_TYPE_DOUBLE, - DATA_TYPE_FLOAT, - DATA_TYPE_INT, - DATA_TYPE_INTERVAL, - DATA_TYPE_MAP, - DATA_TYPE_SMALL_INT, - DATA_TYPE_STRING, - DATA_TYPE_STRUCT, - DATA_TYPE_TIMESTAMP, - DATA_TYPE_TINY_INT, - DATA_TYPE_VOID, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java index 34392719a..253a6b9d8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java @@ -104,8 +104,8 @@ public Wait createMessage( /** * Create conversation message. * - *

Create new message in [conversation](:method:genie/startconversation). The AI response uses - * all previously created messages in the conversation to respond. + *

Create new message in a [conversation](:method:genie/startconversation). The AI response + * uses all previously created messages in the conversation to respond. */ public Wait createMessage( GenieCreateConversationMessageRequest request) { @@ -121,6 +121,27 @@ public Wait createMessage( response); } + public GenieGetMessageQueryResultResponse executeMessageAttachmentQuery( + String spaceId, String conversationId, String messageId, String attachmentId) { + return executeMessageAttachmentQuery( + new GenieExecuteMessageAttachmentQueryRequest() + .setSpaceId(spaceId) + .setConversationId(conversationId) + .setMessageId(messageId) + .setAttachmentId(attachmentId)); + } + + /** + * Execute message attachment SQL query. + * + *

Execute the SQL for a message query attachment. Use this API when the query attachment has + * expired and needs to be re-executed. + */ + public GenieGetMessageQueryResultResponse executeMessageAttachmentQuery( + GenieExecuteMessageAttachmentQueryRequest request) { + return impl.executeMessageAttachmentQuery(request); + } + public GenieGetMessageQueryResultResponse executeMessageQuery( String spaceId, String conversationId, String messageId) { return executeMessageQuery( @@ -131,7 +152,7 @@ public GenieGetMessageQueryResultResponse executeMessageQuery( } /** - * Execute SQL query in a conversation message. + * [Deprecated] Execute SQL query in a conversation message. * *

Execute the SQL query in the message. */ @@ -157,6 +178,27 @@ public GenieMessage getMessage(GenieGetConversationMessageRequest request) { return impl.getMessage(request); } + public GenieGetMessageQueryResultResponse getMessageAttachmentQueryResult( + String spaceId, String conversationId, String messageId, String attachmentId) { + return getMessageAttachmentQueryResult( + new GenieGetMessageAttachmentQueryResultRequest() + .setSpaceId(spaceId) + .setConversationId(conversationId) + .setMessageId(messageId) + .setAttachmentId(attachmentId)); + } + + /** + * Get message attachment SQL query result. + * + *

Get the result of SQL query if the message has a query attachment. This is only available if + * a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. + */ + public GenieGetMessageQueryResultResponse getMessageAttachmentQueryResult( + GenieGetMessageAttachmentQueryResultRequest request) { + return impl.getMessageAttachmentQueryResult(request); + } + public GenieGetMessageQueryResultResponse getMessageQueryResult( String spaceId, String conversationId, String messageId) { return getMessageQueryResult( @@ -167,7 +209,7 @@ public GenieGetMessageQueryResultResponse getMessageQueryResult( } /** - * Get conversation message SQL query result. + * [Deprecated] Get conversation message SQL query result. * *

Get the result of SQL query if the message has a query attachment. This is only available if * a message has a query attachment and the message status is `EXECUTING_QUERY`. @@ -188,16 +230,29 @@ public GenieGetMessageQueryResultResponse getMessageQueryResultByAttachment( } /** - * Get conversation message SQL query result by attachment id. + * [Deprecated] Get conversation message SQL query result. * - *

Get the result of SQL query by attachment id This is only available if a message has a query - * attachment and the message status is `EXECUTING_QUERY`. + *

Get the result of SQL query if the message has a query attachment. This is only available if + * a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. */ public GenieGetMessageQueryResultResponse getMessageQueryResultByAttachment( GenieGetQueryResultByAttachmentRequest request) { return impl.getMessageQueryResultByAttachment(request); } + public GenieSpace getSpace(String spaceId) { + return getSpace(new GenieGetSpaceRequest().setSpaceId(spaceId)); + } + + /** + * Get Genie Space. + * + *

Get details of a Genie Space. + */ + public GenieSpace getSpace(GenieGetSpaceRequest request) { + return impl.getSpace(request); + } + public Wait startConversation( String spaceId, String content) { return startConversation( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java index 1907900b0..588559a03 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java @@ -10,20 +10,33 @@ /** Genie AI Response */ @Generated public class GenieAttachment { - /** */ + /** Attachment ID */ + @JsonProperty("attachment_id") + private String attachmentId; + + /** Query Attachment if Genie responds with a SQL query */ @JsonProperty("query") - private QueryAttachment query; + private GenieQueryAttachment query; - /** */ + /** Text Attachment if Genie responds with text */ @JsonProperty("text") private TextAttachment text; - public GenieAttachment setQuery(QueryAttachment query) { + public GenieAttachment setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieAttachment setQuery(GenieQueryAttachment query) { this.query = query; return this; } - public QueryAttachment getQuery() { + public GenieQueryAttachment getQuery() { return query; } @@ -41,16 +54,22 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GenieAttachment that = (GenieAttachment) o; - return Objects.equals(query, that.query) && Objects.equals(text, that.text); + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(query, that.query) + && Objects.equals(text, that.text); } @Override public int hashCode() { - return 
Objects.hash(query, text); + return Objects.hash(attachmentId, query, text); } @Override public String toString() { - return new ToStringer(GenieAttachment.class).add("query", query).add("text", text).toString(); + return new ToStringer(GenieAttachment.class) + .add("attachmentId", attachmentId) + .add("query", query) + .add("text", text) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java index 50f6440d7..46c2ebd64 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java @@ -9,11 +9,15 @@ @Generated public class GenieConversation { + /** Conversation ID */ + @JsonProperty("conversation_id") + private String conversationId; + /** Timestamp when the message was created */ @JsonProperty("created_timestamp") private Long createdTimestamp; - /** Conversation ID */ + /** Conversation ID. 
Legacy identifier, use conversation_id instead */ @JsonProperty("id") private String id; @@ -33,6 +37,15 @@ public class GenieConversation { @JsonProperty("user_id") private Long userId; + public GenieConversation setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + public GenieConversation setCreatedTimestamp(Long createdTimestamp) { this.createdTimestamp = createdTimestamp; return this; @@ -92,7 +105,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GenieConversation that = (GenieConversation) o; - return Objects.equals(createdTimestamp, that.createdTimestamp) + return Objects.equals(conversationId, that.conversationId) + && Objects.equals(createdTimestamp, that.createdTimestamp) && Objects.equals(id, that.id) && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) && Objects.equals(spaceId, that.spaceId) @@ -102,12 +116,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(createdTimestamp, id, lastUpdatedTimestamp, spaceId, title, userId); + return Objects.hash( + conversationId, createdTimestamp, id, lastUpdatedTimestamp, spaceId, title, userId); } @Override public String toString() { return new ToStringer(GenieConversation.class) + .add("conversationId", conversationId) .add("createdTimestamp", createdTimestamp) .add("id", id) .add("lastUpdatedTimestamp", lastUpdatedTimestamp) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java new file mode 100755 index 000000000..45fcf1257 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java @@ 
-0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Execute message attachment SQL query */ +@Generated +public class GenieExecuteMessageAttachmentQueryRequest { + /** Attachment ID */ + @JsonIgnore private String attachmentId; + + /** Conversation ID */ + @JsonIgnore private String conversationId; + + /** Message ID */ + @JsonIgnore private String messageId; + + /** Genie space ID */ + @JsonIgnore private String spaceId; + + public GenieExecuteMessageAttachmentQueryRequest setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieExecuteMessageAttachmentQueryRequest setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieExecuteMessageAttachmentQueryRequest setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieExecuteMessageAttachmentQueryRequest setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieExecuteMessageAttachmentQueryRequest that = (GenieExecuteMessageAttachmentQueryRequest) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { 
+ return Objects.hash(attachmentId, conversationId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieExecuteMessageAttachmentQueryRequest.class) + .add("attachmentId", attachmentId) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java index 4ad41a28b..52305df74 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Execute SQL query in a conversation message */ +/** [Deprecated] Execute SQL query in a conversation message */ @Generated public class GenieExecuteMessageQueryRequest { /** Conversation ID */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java new file mode 100755 index 000000000..b832faa07 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get message attachment SQL query result */ +@Generated +public class GenieGetMessageAttachmentQueryResultRequest { + /** Attachment ID */ + @JsonIgnore private String attachmentId; + + /** Conversation ID */ + @JsonIgnore private String conversationId; + + /** Message ID */ + @JsonIgnore private String messageId; + + /** Genie space ID */ + @JsonIgnore private String spaceId; + + public GenieGetMessageAttachmentQueryResultRequest setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieGetMessageAttachmentQueryResultRequest setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieGetMessageAttachmentQueryResultRequest setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieGetMessageAttachmentQueryResultRequest setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetMessageAttachmentQueryResultRequest that = + (GenieGetMessageAttachmentQueryResultRequest) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(attachmentId, conversationId, messageId, spaceId); + 
} + + @Override + public String toString() { + return new ToStringer(GenieGetMessageAttachmentQueryResultRequest.class) + .add("attachmentId", attachmentId) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java index 96a7bb58a..a65e75683 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get conversation message SQL query result */ +/** [Deprecated] Get conversation message SQL query result */ @Generated public class GenieGetMessageQueryResultRequest { /** Conversation ID */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java index d3ec6043d..588fb6e71 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get conversation message SQL query result by attachment id */ +/** [Deprecated] Get conversation message SQL query result */ @Generated public class GenieGetQueryResultByAttachmentRequest { /** Attachment ID */ diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java new file mode 100755 index 000000000..d259b43df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get Genie Space */ +@Generated +public class GenieGetSpaceRequest { + /** The ID associated with the Genie space */ + @JsonIgnore private String spaceId; + + public GenieGetSpaceRequest setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetSpaceRequest that = (GenieGetSpaceRequest) o; + return Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieGetSpaceRequest.class).add("spaceId", spaceId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java index 2c0dd670e..af444a4ab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java @@ -33,6 +33,26 @@ public GenieMessage createMessage(GenieCreateConversationMessageRequest request) } } + @Override + 
public GenieGetMessageQueryResultResponse executeMessageAttachmentQuery( + GenieExecuteMessageAttachmentQueryRequest request) { + String path = + String.format( + "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/execute-query", + request.getSpaceId(), + request.getConversationId(), + request.getMessageId(), + request.getAttachmentId()); + try { + Request req = new Request("POST", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GenieGetMessageQueryResultResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GenieGetMessageQueryResultResponse executeMessageQuery( GenieExecuteMessageQueryRequest request) { @@ -66,6 +86,26 @@ public GenieMessage getMessage(GenieGetConversationMessageRequest request) { } } + @Override + public GenieGetMessageQueryResultResponse getMessageAttachmentQueryResult( + GenieGetMessageAttachmentQueryResultRequest request) { + String path = + String.format( + "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/query-result", + request.getSpaceId(), + request.getConversationId(), + request.getMessageId(), + request.getAttachmentId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GenieGetMessageQueryResultResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GenieGetMessageQueryResultResponse getMessageQueryResult( GenieGetMessageQueryResultRequest request) { @@ -103,6 +143,19 @@ public GenieGetMessageQueryResultResponse getMessageQueryResultByAttachment( } } + @Override + public GenieSpace getSpace(GenieGetSpaceRequest request) { + String path = String.format("/api/2.0/genie/spaces/%s", request.getSpaceId()); + try { + Request req = new 
Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GenieSpace.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GenieStartConversationResponse startConversation( GenieStartConversationMessageRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java index 3e2aafbc1..db243673c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java @@ -10,7 +10,7 @@ @Generated public class GenieMessage { - /** AI produced response to the message */ + /** AI-generated response to the message */ @JsonProperty("attachments") private Collection attachments; @@ -26,11 +26,11 @@ public class GenieMessage { @JsonProperty("created_timestamp") private Long createdTimestamp; - /** Error message if AI failed to respond to the message */ + /** Error message if Genie failed to respond to the message */ @JsonProperty("error") private MessageError error; - /** Message ID */ + /** Message ID. Legacy identifier, use message_id instead */ @JsonProperty("id") private String id; @@ -38,7 +38,14 @@ public class GenieMessage { @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; - /** The result of SQL query if the message has a query attachment */ + /** Message ID */ + @JsonProperty("message_id") + private String messageId; + + /** + * The result of SQL query if the message includes a query attachment. Deprecated. Use + * `query_result_metadata` in `GenieQueryAttachment` instead. 
+ */ @JsonProperty("query_result") private Result queryResult; @@ -47,19 +54,20 @@ public class GenieMessage { private String spaceId; /** - * MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data + * MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data * sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * - * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `PENDING_WAREHOUSE`: + * `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `PENDING_WAREHOUSE`: * Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing - * AI provided SQL query. Get the SQL query result by calling - * [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message - * status will stay in the `EXECUTING_QUERY` until a client calls - * [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a - * response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message - * processing is completed. Results are in the `attachments` field. Get the SQL query result by - * calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: - * Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The - * user needs to execute the query again. * `CANCELLED`: Message has been cancelled. + * a generated SQL query. Get the SQL query result by calling + * [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. * + * `FAILED`: The response generation or query execution failed. See `error` field. * `COMPLETED`: + * Message processing is completed. Results are in the `attachments` field. Get the SQL query + * result by calling + * [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. 
* + * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available + * anymore. The user needs to rerun the query. Rerun the SQL query result by calling + * [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. * + * `CANCELLED`: Message has been cancelled. */ @JsonProperty("status") private MessageStatus status; @@ -131,6 +139,15 @@ public Long getLastUpdatedTimestamp() { return lastUpdatedTimestamp; } + public GenieMessage setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + public GenieMessage setQueryResult(Result queryResult) { this.queryResult = queryResult; return this; @@ -179,6 +196,7 @@ public boolean equals(Object o) { && Objects.equals(error, that.error) && Objects.equals(id, that.id) && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) + && Objects.equals(messageId, that.messageId) && Objects.equals(queryResult, that.queryResult) && Objects.equals(spaceId, that.spaceId) && Objects.equals(status, that.status) @@ -195,6 +213,7 @@ public int hashCode() { error, id, lastUpdatedTimestamp, + messageId, queryResult, spaceId, status, @@ -211,6 +230,7 @@ public String toString() { .add("error", error) .add("id", id) .add("lastUpdatedTimestamp", lastUpdatedTimestamp) + .add("messageId", messageId) .add("queryResult", queryResult) .add("spaceId", spaceId) .add("status", status) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java similarity index 51% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java index f3e972be4..5ad10ce62 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java @@ -8,11 +8,7 @@ import java.util.Objects; @Generated -public class QueryAttachment { - /** */ - @JsonProperty("cached_query_schema") - private QuerySchema cachedQuerySchema; - +public class GenieQueryAttachment { /** Description of the query */ @JsonProperty("description") private String description; @@ -21,17 +17,6 @@ public class QueryAttachment { @JsonProperty("id") private String id; - /** If the query was created on an instruction (trusted asset) we link to the id */ - @JsonProperty("instruction_id") - private String instructionId; - - /** - * Always store the title next to the id in case the original instruction title changes or the - * instruction is deleted. - */ - @JsonProperty("instruction_title") - private String instructionTitle; - /** Time when the user updated the query last */ @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; @@ -40,7 +25,14 @@ public class QueryAttachment { @JsonProperty("query") private String query; - /** */ + /** Metadata associated with the query result. */ + @JsonProperty("query_result_metadata") + private GenieResultMetadata queryResultMetadata; + + /** + * Statement Execution API statement id. Use [Get status, manifest, and result first + * chunk](:method:statementexecution/getstatement) to get the full result data. 
+ */ @JsonProperty("statement_id") private String statementId; @@ -48,16 +40,7 @@ public class QueryAttachment { @JsonProperty("title") private String title; - public QueryAttachment setCachedQuerySchema(QuerySchema cachedQuerySchema) { - this.cachedQuerySchema = cachedQuerySchema; - return this; - } - - public QuerySchema getCachedQuerySchema() { - return cachedQuerySchema; - } - - public QueryAttachment setDescription(String description) { + public GenieQueryAttachment setDescription(String description) { this.description = description; return this; } @@ -66,7 +49,7 @@ public String getDescription() { return description; } - public QueryAttachment setId(String id) { + public GenieQueryAttachment setId(String id) { this.id = id; return this; } @@ -75,25 +58,7 @@ public String getId() { return id; } - public QueryAttachment setInstructionId(String instructionId) { - this.instructionId = instructionId; - return this; - } - - public String getInstructionId() { - return instructionId; - } - - public QueryAttachment setInstructionTitle(String instructionTitle) { - this.instructionTitle = instructionTitle; - return this; - } - - public String getInstructionTitle() { - return instructionTitle; - } - - public QueryAttachment setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { + public GenieQueryAttachment setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { this.lastUpdatedTimestamp = lastUpdatedTimestamp; return this; } @@ -102,7 +67,7 @@ public Long getLastUpdatedTimestamp() { return lastUpdatedTimestamp; } - public QueryAttachment setQuery(String query) { + public GenieQueryAttachment setQuery(String query) { this.query = query; return this; } @@ -111,7 +76,16 @@ public String getQuery() { return query; } - public QueryAttachment setStatementId(String statementId) { + public GenieQueryAttachment setQueryResultMetadata(GenieResultMetadata queryResultMetadata) { + this.queryResultMetadata = queryResultMetadata; + return this; + } + + public GenieResultMetadata 
getQueryResultMetadata() { + return queryResultMetadata; + } + + public GenieQueryAttachment setStatementId(String statementId) { this.statementId = statementId; return this; } @@ -120,7 +94,7 @@ public String getStatementId() { return statementId; } - public QueryAttachment setTitle(String title) { + public GenieQueryAttachment setTitle(String title) { this.title = title; return this; } @@ -133,14 +107,12 @@ public String getTitle() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - QueryAttachment that = (QueryAttachment) o; - return Objects.equals(cachedQuerySchema, that.cachedQuerySchema) - && Objects.equals(description, that.description) + GenieQueryAttachment that = (GenieQueryAttachment) o; + return Objects.equals(description, that.description) && Objects.equals(id, that.id) - && Objects.equals(instructionId, that.instructionId) - && Objects.equals(instructionTitle, that.instructionTitle) && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) && Objects.equals(query, that.query) + && Objects.equals(queryResultMetadata, that.queryResultMetadata) && Objects.equals(statementId, that.statementId) && Objects.equals(title, that.title); } @@ -148,27 +120,17 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - cachedQuerySchema, - description, - id, - instructionId, - instructionTitle, - lastUpdatedTimestamp, - query, - statementId, - title); + description, id, lastUpdatedTimestamp, query, queryResultMetadata, statementId, title); } @Override public String toString() { - return new ToStringer(QueryAttachment.class) - .add("cachedQuerySchema", cachedQuerySchema) + return new ToStringer(GenieQueryAttachment.class) .add("description", description) .add("id", id) - .add("instructionId", instructionId) - .add("instructionTitle", instructionTitle) .add("lastUpdatedTimestamp", lastUpdatedTimestamp) .add("query", query) + .add("queryResultMetadata", 
queryResultMetadata) .add("statementId", statementId) .add("title", title) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java new file mode 100755 index 000000000..838208d24 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenieResultMetadata { + /** Indicates whether the result set is truncated. */ + @JsonProperty("is_truncated") + private Boolean isTruncated; + + /** The number of rows in the result set. */ + @JsonProperty("row_count") + private Long rowCount; + + public GenieResultMetadata setIsTruncated(Boolean isTruncated) { + this.isTruncated = isTruncated; + return this; + } + + public Boolean getIsTruncated() { + return isTruncated; + } + + public GenieResultMetadata setRowCount(Long rowCount) { + this.rowCount = rowCount; + return this; + } + + public Long getRowCount() { + return rowCount; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieResultMetadata that = (GenieResultMetadata) o; + return Objects.equals(isTruncated, that.isTruncated) && Objects.equals(rowCount, that.rowCount); + } + + @Override + public int hashCode() { + return Objects.hash(isTruncated, rowCount); + } + + @Override + public String toString() { + return new ToStringer(GenieResultMetadata.class) + .add("isTruncated", isTruncated) + .add("rowCount", rowCount) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java index 5404c9231..540e9f01e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java @@ -18,14 +18,23 @@ public interface GenieService { /** * Create conversation message. * - *
<p>
Create new message in [conversation](:method:genie/startconversation). The AI response uses - * all previously created messages in the conversation to respond. + *
<p>
Create new message in a [conversation](:method:genie/startconversation). The AI response + * uses all previously created messages in the conversation to respond. */ GenieMessage createMessage( GenieCreateConversationMessageRequest genieCreateConversationMessageRequest); /** - * Execute SQL query in a conversation message. + * Execute message attachment SQL query. + * + *
<p>
Execute the SQL for a message query attachment. Use this API when the query attachment has + * expired and needs to be re-executed. + */ + GenieGetMessageQueryResultResponse executeMessageAttachmentQuery( + GenieExecuteMessageAttachmentQueryRequest genieExecuteMessageAttachmentQueryRequest); + + /** + * [Deprecated] Execute SQL query in a conversation message. * *
<p>
Execute the SQL query in the message. */ @@ -40,7 +49,16 @@ GenieGetMessageQueryResultResponse executeMessageQuery( GenieMessage getMessage(GenieGetConversationMessageRequest genieGetConversationMessageRequest); /** - * Get conversation message SQL query result. + * Get message attachment SQL query result. + * + *
<p>
Get the result of SQL query if the message has a query attachment. This is only available if + * a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. + */ + GenieGetMessageQueryResultResponse getMessageAttachmentQueryResult( + GenieGetMessageAttachmentQueryResultRequest genieGetMessageAttachmentQueryResultRequest); + + /** + * [Deprecated] Get conversation message SQL query result. * *
<p>
Get the result of SQL query if the message has a query attachment. This is only available if * a message has a query attachment and the message status is `EXECUTING_QUERY`. @@ -49,14 +67,21 @@ GenieGetMessageQueryResultResponse getMessageQueryResult( GenieGetMessageQueryResultRequest genieGetMessageQueryResultRequest); /** - * Get conversation message SQL query result by attachment id. + * [Deprecated] Get conversation message SQL query result. * - *
<p>
Get the result of SQL query by attachment id This is only available if a message has a query - * attachment and the message status is `EXECUTING_QUERY`. + *
<p>
Get the result of SQL query if the message has a query attachment. This is only available if + * a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. */ GenieGetMessageQueryResultResponse getMessageQueryResultByAttachment( GenieGetQueryResultByAttachmentRequest genieGetQueryResultByAttachmentRequest); + /** + * Get Genie Space. + * + *
<p>
Get details of a Genie Space. + */ + GenieSpace getSpace(GenieGetSpaceRequest genieGetSpaceRequest); + /** * Start conversation. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java new file mode 100755 index 000000000..1be583ef7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenieSpace { + /** Description of the Genie Space */ + @JsonProperty("description") + private String description; + + /** Space ID */ + @JsonProperty("space_id") + private String spaceId; + + /** Title of the Genie Space */ + @JsonProperty("title") + private String title; + + public GenieSpace setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public GenieSpace setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + public GenieSpace setTitle(String title) { + this.title = title; + return this; + } + + public String getTitle() { + return title; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieSpace that = (GenieSpace) o; + return Objects.equals(description, that.description) + && Objects.equals(spaceId, that.spaceId) + && Objects.equals(title, that.title); + } + + @Override + public int hashCode() { + return Objects.hash(description, spaceId, title); + } + + @Override + 
public String toString() { + return new ToStringer(GenieSpace.class) + .add("description", description) + .add("spaceId", spaceId) + .add("title", title) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java index 5de4f1042..c1ba58d9d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java @@ -12,11 +12,13 @@ public enum MessageErrorType { CHAT_COMPLETION_NETWORK_EXCEPTION, CONTENT_FILTER_EXCEPTION, CONTEXT_EXCEEDED_EXCEPTION, + COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION, COULD_NOT_GET_UC_SCHEMA_EXCEPTION, DEPLOYMENT_NOT_FOUND_EXCEPTION, FUNCTIONS_NOT_AVAILABLE_EXCEPTION, FUNCTION_ARGUMENTS_INVALID_EXCEPTION, FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION, + FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION, FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION, GENERIC_CHAT_COMPLETION_EXCEPTION, GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION, @@ -29,6 +31,7 @@ public enum MessageErrorType { INVALID_FUNCTION_CALL_EXCEPTION, INVALID_TABLE_IDENTIFIER_EXCEPTION, LOCAL_CONTEXT_EXCEEDED_EXCEPTION, + MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION, MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION, MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION, NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java index a3454a3f7..9910fe2f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java @@ -5,37 +5,39 @@ import com.databricks.sdk.support.Generated; /** - * MesssageStatus. 
The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data + * MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data * sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * - * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `PENDING_WAREHOUSE`: Waiting - * for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing AI - * provided SQL query. Get the SQL query result by calling - * [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status - * will stay in the `EXECUTING_QUERY` until a client calls - * [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response - * or the executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is - * completed. Results are in the `attachments` field. Get the SQL query result by calling - * [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been - * submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to - * execute the query again. * `CANCELLED`: Message has been cancelled. + * `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `PENDING_WAREHOUSE`: + * Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing a + * generated SQL query. Get the SQL query result by calling + * [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. * `FAILED`: + * The response generation or query execution failed. See `error` field. * `COMPLETED`: Message + * processing is completed. Results are in the `attachments` field. Get the SQL query result by + * calling [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. * + * `SUBMITTED`: Message has been submitted. 
* `QUERY_RESULT_EXPIRED`: SQL result is not available + * anymore. The user needs to rerun the query. Rerun the SQL query result by calling + * [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. * `CANCELLED`: + * Message has been cancelled. */ @Generated public enum MessageStatus { - ASKING_AI, // Waiting for the LLM to respond to the users question. + ASKING_AI, // Waiting for the LLM to respond to the user's question. CANCELLED, // Message has been cancelled. COMPLETED, // Message processing is completed. Results are in the `attachments` field. Get // the SQL query result by calling - // [getMessageQueryResult](:method:genie/getMessageQueryResult) API. - EXECUTING_QUERY, // Executing AI provided SQL query. Get the SQL query result by calling - // [getMessageQueryResult](:method:genie/getMessageQueryResult) API. - // **Important: The message status will stay in the `EXECUTING_QUERY` until a - // client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. - FAILED, // Generating a response or the executing the query failed. Please see `error` - // field. + // [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) + // API. + EXECUTING_QUERY, // Executing a generated SQL query. Get the SQL query result by calling + // [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) + // API. + FAILED, // The response generation or query execution failed. See `error` field. FETCHING_METADATA, // Fetching metadata from the data sources. FILTERING_CONTEXT, // Running smart context step to determine relevant context. PENDING_WAREHOUSE, // Waiting for warehouse before the SQL query can start executing. - QUERY_RESULT_EXPIRED, // SQL result is not available anymore. The user needs to execute the query - // again. + QUERY_RESULT_EXPIRED, // SQL result is not available anymore. The user needs to rerun the query. 
+ // Rerun + // the SQL query result by calling + // [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) + // API. SUBMITTED, // Message has been submitted. } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchema.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchema.java deleted file mode 100755 index 700920df9..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchema.java +++ /dev/null @@ -1,62 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class QuerySchema { - /** */ - @JsonProperty("columns") - private Collection columns; - - /** - * Used to determine if the stored query schema is compatible with the latest run. The service - * should always clear the schema when the query is re-executed. 
- */ - @JsonProperty("statement_id") - private String statementId; - - public QuerySchema setColumns(Collection columns) { - this.columns = columns; - return this; - } - - public Collection getColumns() { - return columns; - } - - public QuerySchema setStatementId(String statementId) { - this.statementId = statementId; - return this; - } - - public String getStatementId() { - return statementId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - QuerySchema that = (QuerySchema) o; - return Objects.equals(columns, that.columns) && Objects.equals(statementId, that.statementId); - } - - @Override - public int hashCode() { - return Objects.hash(columns, statementId); - } - - @Override - public String toString() { - return new ToStringer(QuerySchema.class) - .add("columns", columns) - .add("statementId", statementId) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchemaColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchemaColumn.java deleted file mode 100755 index afe914595..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchemaColumn.java +++ /dev/null @@ -1,74 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class QuerySchemaColumn { - /** Populated from https://docs.databricks.com/sql/language-manual/sql-ref-datatypes.html */ - @JsonProperty("data_type") - private DataType dataType; - - /** */ - @JsonProperty("name") - private String name; - - /** Corresponds to type desc */ - @JsonProperty("type_text") - private String typeText; - - public QuerySchemaColumn setDataType(DataType dataType) { - this.dataType = dataType; - return this; - } - - public DataType getDataType() { - return dataType; - } - - public QuerySchemaColumn setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public QuerySchemaColumn setTypeText(String typeText) { - this.typeText = typeText; - return this; - } - - public String getTypeText() { - return typeText; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - QuerySchemaColumn that = (QuerySchemaColumn) o; - return Objects.equals(dataType, that.dataType) - && Objects.equals(name, that.name) - && Objects.equals(typeText, that.typeText); - } - - @Override - public int hashCode() { - return Objects.hash(dataType, name, typeText); - } - - @Override - public String toString() { - return new ToStringer(QuerySchemaColumn.class) - .add("dataType", dataType) - .add("name", name) - .add("typeText", typeText) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponse.java index 627fbb315..dc199e942 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponse.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponse.java @@ -11,7 +11,7 @@ @Generated public class DownloadResponse { - /** */ + /** The length of the HTTP response body in bytes. */ @JsonIgnore @Header("content-length") private Long contentLength; @@ -24,7 +24,7 @@ public class DownloadResponse { /** */ @JsonIgnore private InputStream contents; - /** */ + /** The last modified time of the file in HTTP-date (RFC 7231) format. */ @JsonIgnore @Header("last-modified") private String lastModified; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponse.java index bba5f07aa..5fda9475b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponse.java @@ -10,7 +10,7 @@ @Generated public class GetMetadataResponse { - /** */ + /** The length of the HTTP response body in bytes. */ @JsonIgnore @Header("content-length") private Long contentLength; @@ -20,7 +20,7 @@ public class GetMetadataResponse { @Header("content-type") private String contentType; - /** */ + /** The last modified time of the file in HTTP-date (RFC 7231) format. */ @JsonIgnore @Header("last-modified") private String lastModified; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java index ebf3edff6..9dbd706f9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java @@ -53,9 +53,8 @@ public class BaseRun { /** * effective_performance_target is the actual performance target used by the run during execution. 
- * effective_performance_target can differ from performance_target depending on if the job was - * eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if we specifically - * override the value for the run (ex. RunNow). + * effective_performance_target can differ from the client-set performance_target depending on if + * the job was eligible to be cost-optimized. */ @JsonProperty("effective_performance_target") private PerformanceTarget effectivePerformanceTarget; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java new file mode 100755 index 000000000..c183e1b1a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Next field: 4 */ +@Generated +public class ComputeConfig { + /** ID of the GPU pool to use. */ + @JsonProperty("gpu_node_pool_id") + private String gpuNodePoolId; + + /** GPU type. */ + @JsonProperty("gpu_type") + private String gpuType; + + /** Number of GPUs. 
*/ + @JsonProperty("num_gpus") + private Long numGpus; + + public ComputeConfig setGpuNodePoolId(String gpuNodePoolId) { + this.gpuNodePoolId = gpuNodePoolId; + return this; + } + + public String getGpuNodePoolId() { + return gpuNodePoolId; + } + + public ComputeConfig setGpuType(String gpuType) { + this.gpuType = gpuType; + return this; + } + + public String getGpuType() { + return gpuType; + } + + public ComputeConfig setNumGpus(Long numGpus) { + this.numGpus = numGpus; + return this; + } + + public Long getNumGpus() { + return numGpus; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ComputeConfig that = (ComputeConfig) o; + return Objects.equals(gpuNodePoolId, that.gpuNodePoolId) + && Objects.equals(gpuType, that.gpuType) + && Objects.equals(numGpus, that.numGpus); + } + + @Override + public int hashCode() { + return Objects.hash(gpuNodePoolId, gpuType, numGpus); + } + + @Override + public String toString() { + return new ToStringer(ComputeConfig.class) + .add("gpuNodePoolId", gpuNodePoolId) + .add("gpuType", gpuType) + .add("numGpus", numGpus) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java new file mode 100755 index 000000000..68e0257df --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java @@ -0,0 +1,175 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Next field: 9 */ +@Generated +public class GenAiComputeTask { + /** Command launcher to run the actual script, e.g. bash, python etc. 
*/ + @JsonProperty("command") + private String command; + + /** Next field: 4 */ + @JsonProperty("compute") + private ComputeConfig compute; + + /** Runtime image */ + @JsonProperty("dl_runtime_image") + private String dlRuntimeImage; + + /** + * Optional string containing the name of the MLflow experiment to log the run to. If name is not + * found, backend will create the mlflow experiment using the name. + */ + @JsonProperty("mlflow_experiment_name") + private String mlflowExperimentName; + + /** + * Optional location type of the training script. When set to `WORKSPACE`, the script will be + * retrieved from the local Databricks workspace. When set to `GIT`, the script will be retrieved + * from a Git repository defined in `git_source`. If the value is empty, the task will use `GIT` + * if `git_source` is defined and `WORKSPACE` otherwise. * `WORKSPACE`: Script is located in + * Databricks workspace. * `GIT`: Script is located in cloud Git provider. + */ + @JsonProperty("source") + private Source source; + + /** + * The training script file path to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, + * gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, + * the path must be absolute and begin with `/`. For files stored in a remote repository, the path + * must be relative. This field is required. + */ + @JsonProperty("training_script_path") + private String trainingScriptPath; + + /** + * Optional string containing model parameters passed to the training script in yaml format. If + * present, then the content in yaml_parameters_file_path will be ignored. + */ + @JsonProperty("yaml_parameters") + private String yamlParameters; + + /** Optional path to a YAML file containing model parameters passed to the training script. 
*/ + @JsonProperty("yaml_parameters_file_path") + private String yamlParametersFilePath; + + public GenAiComputeTask setCommand(String command) { + this.command = command; + return this; + } + + public String getCommand() { + return command; + } + + public GenAiComputeTask setCompute(ComputeConfig compute) { + this.compute = compute; + return this; + } + + public ComputeConfig getCompute() { + return compute; + } + + public GenAiComputeTask setDlRuntimeImage(String dlRuntimeImage) { + this.dlRuntimeImage = dlRuntimeImage; + return this; + } + + public String getDlRuntimeImage() { + return dlRuntimeImage; + } + + public GenAiComputeTask setMlflowExperimentName(String mlflowExperimentName) { + this.mlflowExperimentName = mlflowExperimentName; + return this; + } + + public String getMlflowExperimentName() { + return mlflowExperimentName; + } + + public GenAiComputeTask setSource(Source source) { + this.source = source; + return this; + } + + public Source getSource() { + return source; + } + + public GenAiComputeTask setTrainingScriptPath(String trainingScriptPath) { + this.trainingScriptPath = trainingScriptPath; + return this; + } + + public String getTrainingScriptPath() { + return trainingScriptPath; + } + + public GenAiComputeTask setYamlParameters(String yamlParameters) { + this.yamlParameters = yamlParameters; + return this; + } + + public String getYamlParameters() { + return yamlParameters; + } + + public GenAiComputeTask setYamlParametersFilePath(String yamlParametersFilePath) { + this.yamlParametersFilePath = yamlParametersFilePath; + return this; + } + + public String getYamlParametersFilePath() { + return yamlParametersFilePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenAiComputeTask that = (GenAiComputeTask) o; + return Objects.equals(command, that.command) + && Objects.equals(compute, that.compute) + && Objects.equals(dlRuntimeImage, 
that.dlRuntimeImage) + && Objects.equals(mlflowExperimentName, that.mlflowExperimentName) + && Objects.equals(source, that.source) + && Objects.equals(trainingScriptPath, that.trainingScriptPath) + && Objects.equals(yamlParameters, that.yamlParameters) + && Objects.equals(yamlParametersFilePath, that.yamlParametersFilePath); + } + + @Override + public int hashCode() { + return Objects.hash( + command, + compute, + dlRuntimeImage, + mlflowExperimentName, + source, + trainingScriptPath, + yamlParameters, + yamlParametersFilePath); + } + + @Override + public String toString() { + return new ToStringer(GenAiComputeTask.class) + .add("command", command) + .add("compute", compute) + .add("dlRuntimeImage", dlRuntimeImage) + .add("mlflowExperimentName", mlflowExperimentName) + .add("source", source) + .add("trainingScriptPath", trainingScriptPath) + .add("yamlParameters", yamlParameters) + .add("yamlParametersFilePath", yamlParametersFilePath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java index 48347babe..0902a4b5f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java @@ -18,11 +18,10 @@ public JobsImpl(ApiClient apiClient) { @Override public void cancelAllRuns(CancelAllRuns request) { - String path = "/api/2.1/jobs/runs/cancel-all"; + String path = "/api/2.2/jobs/runs/cancel-all"; try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, CancelAllRunsResponse.class); } catch (IOException e) { @@ -32,11 +31,10 @@ public void cancelAllRuns(CancelAllRuns request) { @Override public void cancelRun(CancelRun request) { - String path 
= "/api/2.1/jobs/runs/cancel"; + String path = "/api/2.2/jobs/runs/cancel"; try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, CancelRunResponse.class); } catch (IOException e) { @@ -46,7 +44,7 @@ public void cancelRun(CancelRun request) { @Override public CreateResponse create(CreateJob request) { - String path = "/api/2.1/jobs/create"; + String path = "/api/2.2/jobs/create"; try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); @@ -60,11 +58,10 @@ public CreateResponse create(CreateJob request) { @Override public void delete(DeleteJob request) { - String path = "/api/2.1/jobs/delete"; + String path = "/api/2.2/jobs/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { @@ -74,11 +71,10 @@ public void delete(DeleteJob request) { @Override public void deleteRun(DeleteRun request) { - String path = "/api/2.1/jobs/runs/delete"; + String path = "/api/2.2/jobs/runs/delete"; try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, DeleteRunResponse.class); } catch (IOException e) { @@ -88,7 +84,7 @@ public void deleteRun(DeleteRun request) { @Override public ExportRunOutput exportRun(ExportRunRequest request) { - String path = "/api/2.1/jobs/runs/export"; + String path = "/api/2.2/jobs/runs/export"; try { Request req = new Request("GET", path); ApiClient.setQuery(req, request); @@ -101,7 +97,7 @@ public ExportRunOutput 
exportRun(ExportRunRequest request) { @Override public Job get(GetJobRequest request) { - String path = "/api/2.1/jobs/get"; + String path = "/api/2.2/jobs/get"; try { Request req = new Request("GET", path); ApiClient.setQuery(req, request); @@ -141,7 +137,7 @@ public JobPermissions getPermissions(GetJobPermissionsRequest request) { @Override public Run getRun(GetRunRequest request) { - String path = "/api/2.1/jobs/runs/get"; + String path = "/api/2.2/jobs/runs/get"; try { Request req = new Request("GET", path); ApiClient.setQuery(req, request); @@ -154,7 +150,7 @@ public Run getRun(GetRunRequest request) { @Override public RunOutput getRunOutput(GetRunOutputRequest request) { - String path = "/api/2.1/jobs/runs/get-output"; + String path = "/api/2.2/jobs/runs/get-output"; try { Request req = new Request("GET", path); ApiClient.setQuery(req, request); @@ -167,7 +163,7 @@ public RunOutput getRunOutput(GetRunOutputRequest request) { @Override public ListJobsResponse list(ListJobsRequest request) { - String path = "/api/2.1/jobs/list"; + String path = "/api/2.2/jobs/list"; try { Request req = new Request("GET", path); ApiClient.setQuery(req, request); @@ -180,7 +176,7 @@ public ListJobsResponse list(ListJobsRequest request) { @Override public ListRunsResponse listRuns(ListRunsRequest request) { - String path = "/api/2.1/jobs/runs/list"; + String path = "/api/2.2/jobs/runs/list"; try { Request req = new Request("GET", path); ApiClient.setQuery(req, request); @@ -193,7 +189,7 @@ public ListRunsResponse listRuns(ListRunsRequest request) { @Override public RepairRunResponse repairRun(RepairRun request) { - String path = "/api/2.1/jobs/runs/repair"; + String path = "/api/2.2/jobs/runs/repair"; try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); @@ -207,11 +203,10 @@ public RepairRunResponse repairRun(RepairRun request) { @Override public void reset(ResetJob request) { - String path = "/api/2.1/jobs/reset"; + String 
path = "/api/2.2/jobs/reset"; try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, ResetResponse.class); } catch (IOException e) { @@ -221,7 +216,7 @@ public void reset(ResetJob request) { @Override public RunNowResponse runNow(RunNow request) { - String path = "/api/2.1/jobs/run-now"; + String path = "/api/2.2/jobs/run-now"; try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); @@ -249,7 +244,7 @@ public JobPermissions setPermissions(JobPermissionsRequest request) { @Override public SubmitRunResponse submit(SubmitRun request) { - String path = "/api/2.1/jobs/runs/submit"; + String path = "/api/2.2/jobs/runs/submit"; try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); @@ -263,11 +258,10 @@ public SubmitRunResponse submit(SubmitRun request) { @Override public void update(UpdateJob request) { - String path = "/api/2.1/jobs/update"; + String path = "/api/2.2/jobs/update"; try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java index 09bf4f5f5..be8386cf9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java @@ -54,9 +54,8 @@ public class Run { /** * effective_performance_target is the actual performance target used by the run during execution. 
- * effective_performance_target can differ from performance_target depending on if the job was - * eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if we specifically - * override the value for the run (ex. RunNow). + * effective_performance_target can differ from the client-set performance_target depending on if + * the job was eligible to be cost-optimized. */ @JsonProperty("effective_performance_target") private PerformanceTarget effectivePerformanceTarget; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunLifecycleStateV2State.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunLifecycleStateV2State.java index 3b97b8600..de61fe6aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunLifecycleStateV2State.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunLifecycleStateV2State.java @@ -13,4 +13,5 @@ public enum RunLifecycleStateV2State { RUNNING, TERMINATED, TERMINATING, + WAITING, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java index d76e352b9..1ca7bf0b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java @@ -86,8 +86,8 @@ public class RunNow { /** * PerformanceTarget defines how performant or cost efficient the execution of run on serverless - * compute should be. For RunNow request, the run will execute with this settings instead of ones - * defined in job. + * compute should be. For RunNow, this performance target will override the target defined on the + * job-level. 
*/ @JsonProperty("performance_target") private PerformanceTarget performanceTarget; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java index aec2fdf23..c1e6d25ed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java @@ -81,9 +81,8 @@ public class RunTask { /** * effective_performance_target is the actual performance target used by the run during execution. - * effective_performance_target can differ from performance_target depending on if the job was - * eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if an override was - * provided for the run (ex. RunNow). + * effective_performance_target can differ from the client-set performance_target depending on if + * the job was eligible to be cost-optimized. */ @JsonProperty("effective_performance_target") private PerformanceTarget effectivePerformanceTarget; @@ -134,6 +133,10 @@ public class RunTask { @JsonProperty("for_each_task") private RunForEachTask forEachTask; + /** Next field: 9 */ + @JsonProperty("gen_ai_compute_task") + private GenAiComputeTask genAiComputeTask; + /** * An optional specification for a remote Git repository containing the source code used by tasks. 
* Version-controlled source code is supported by notebook, dbt, Python script, and SQL File @@ -442,6 +445,15 @@ public RunForEachTask getForEachTask() { return forEachTask; } + public RunTask setGenAiComputeTask(GenAiComputeTask genAiComputeTask) { + this.genAiComputeTask = genAiComputeTask; + return this; + } + + public GenAiComputeTask getGenAiComputeTask() { + return genAiComputeTask; + } + public RunTask setGitSource(GitSource gitSource) { this.gitSource = gitSource; return this; @@ -697,6 +709,7 @@ public boolean equals(Object o) { && Objects.equals(executionDuration, that.executionDuration) && Objects.equals(existingClusterId, that.existingClusterId) && Objects.equals(forEachTask, that.forEachTask) + && Objects.equals(genAiComputeTask, that.genAiComputeTask) && Objects.equals(gitSource, that.gitSource) && Objects.equals(jobClusterKey, that.jobClusterKey) && Objects.equals(libraries, that.libraries) @@ -744,6 +757,7 @@ public int hashCode() { executionDuration, existingClusterId, forEachTask, + genAiComputeTask, gitSource, jobClusterKey, libraries, @@ -791,6 +805,7 @@ public String toString() { .add("executionDuration", executionDuration) .add("existingClusterId", existingClusterId) .add("forEachTask", forEachTask) + .add("genAiComputeTask", genAiComputeTask) .add("gitSource", gitSource) .add("jobClusterKey", jobClusterKey) .add("libraries", libraries) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 985f0bcfd..89f802477 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -74,6 +74,10 @@ public class SubmitTask { @JsonProperty("for_each_task") private ForEachTask forEachTask; + /** Next field: 9 */ + @JsonProperty("gen_ai_compute_task") + private GenAiComputeTask genAiComputeTask; + /** An 
optional set of health rules that can be defined for this job. */ @JsonProperty("health") private JobsHealthRules health; @@ -259,6 +263,15 @@ public ForEachTask getForEachTask() { return forEachTask; } + public SubmitTask setGenAiComputeTask(GenAiComputeTask genAiComputeTask) { + this.genAiComputeTask = genAiComputeTask; + return this; + } + + public GenAiComputeTask getGenAiComputeTask() { + return genAiComputeTask; + } + public SubmitTask setHealth(JobsHealthRules health) { this.health = health; return this; @@ -417,6 +430,7 @@ public boolean equals(Object o) { && Objects.equals(environmentKey, that.environmentKey) && Objects.equals(existingClusterId, that.existingClusterId) && Objects.equals(forEachTask, that.forEachTask) + && Objects.equals(genAiComputeTask, that.genAiComputeTask) && Objects.equals(health, that.health) && Objects.equals(libraries, that.libraries) && Objects.equals(newCluster, that.newCluster) @@ -447,6 +461,7 @@ public int hashCode() { environmentKey, existingClusterId, forEachTask, + genAiComputeTask, health, libraries, newCluster, @@ -477,6 +492,7 @@ public String toString() { .add("environmentKey", environmentKey) .add("existingClusterId", existingClusterId) .add("forEachTask", forEachTask) + .add("genAiComputeTask", genAiComputeTask) .add("health", health) .add("libraries", libraries) .add("newCluster", newCluster) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index 5805eeb5e..27f43b9f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -79,6 +79,10 @@ public class Task { @JsonProperty("for_each_task") private ForEachTask forEachTask; + /** Next field: 9 */ + @JsonProperty("gen_ai_compute_task") + private GenAiComputeTask genAiComputeTask; + /** An optional set of health rules that can be defined for 
this job. */ @JsonProperty("health") private JobsHealthRules health; @@ -307,6 +311,15 @@ public ForEachTask getForEachTask() { return forEachTask; } + public Task setGenAiComputeTask(GenAiComputeTask genAiComputeTask) { + this.genAiComputeTask = genAiComputeTask; + return this; + } + + public GenAiComputeTask getGenAiComputeTask() { + return genAiComputeTask; + } + public Task setHealth(JobsHealthRules health) { this.health = health; return this; @@ -502,6 +515,7 @@ public boolean equals(Object o) { && Objects.equals(environmentKey, that.environmentKey) && Objects.equals(existingClusterId, that.existingClusterId) && Objects.equals(forEachTask, that.forEachTask) + && Objects.equals(genAiComputeTask, that.genAiComputeTask) && Objects.equals(health, that.health) && Objects.equals(jobClusterKey, that.jobClusterKey) && Objects.equals(libraries, that.libraries) @@ -537,6 +551,7 @@ public int hashCode() { environmentKey, existingClusterId, forEachTask, + genAiComputeTask, health, jobClusterKey, libraries, @@ -572,6 +587,7 @@ public String toString() { .add("environmentKey", environmentKey) .add("existingClusterId", existingClusterId) .add("forEachTask", forEachTask) + .add("genAiComputeTask", genAiComputeTask) .add("health", health) .add("jobClusterKey", jobClusterKey) .add("libraries", libraries) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java index 699094949..cc7583362 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java @@ -6,6 +6,7 @@ @Generated public enum AssetType { + ASSET_TYPE_APP, ASSET_TYPE_DATA_TABLE, ASSET_TYPE_GIT_REPO, ASSET_TYPE_MEDIA, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java new file mode 100755 index 000000000..d5b1322e3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java @@ -0,0 +1,336 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class CreateForecastingExperimentRequest { + /** + * Name of the column in the input training table used to customize the weight for each time + * series to calculate weighted metrics. + */ + @JsonProperty("custom_weights_column") + private String customWeightsColumn; + + /** + * The quantity of the input data granularity. Together with data_granularity_unit field, this + * defines the time interval between consecutive rows in the time series data. For now, only 1 + * second, 1/5/10/15/30 minutes, 1 hour, 1 day, 1 week, 1 month, 1 quarter, 1 year are supported. + */ + @JsonProperty("data_granularity_quantity") + private Long dataGranularityQuantity; + + /** + * The time unit of the input data granularity. Together with data_granularity_quantity field, + * this defines the time interval between consecutive rows in the time series data. Possible + * values: * 'W' (weeks) * 'D' / 'days' / 'day' * 'hours' / 'hour' / 'hr' / 'h' * 'm' / 'minute' / + * 'min' / 'minutes' / 'T' * 'S' / 'seconds' / 'sec' / 'second' * 'M' / 'month' / 'months' * 'Q' / + * 'quarter' / 'quarters' * 'Y' / 'year' / 'years' + */ + @JsonProperty("data_granularity_unit") + private String dataGranularityUnit; + + /** + * The path to the created experiment. This is the path where the experiment will be stored in the + * workspace. 
+ */ + @JsonProperty("experiment_path") + private String experimentPath; + + /** + * The number of time steps into the future for which predictions should be made. This value + * represents a multiple of data_granularity_unit and data_granularity_quantity determining how + * far ahead the model will forecast. + */ + @JsonProperty("forecast_horizon") + private Long forecastHorizon; + + /** + * Region code(s) to consider when automatically adding holiday features. When empty, no holiday + * features are added. Only supports 1 holiday region for now. + */ + @JsonProperty("holiday_regions") + private Collection holidayRegions; + + /** + * The maximum duration in minutes for which the experiment is allowed to run. If the experiment + * exceeds this time limit it will be stopped automatically. + */ + @JsonProperty("max_runtime") + private Long maxRuntime; + + /** + * The three-level (fully qualified) path to a unity catalog table. This table path serves to + * store the predictions. + */ + @JsonProperty("prediction_data_path") + private String predictionDataPath; + + /** The evaluation metric used to optimize the forecasting model. */ + @JsonProperty("primary_metric") + private String primaryMetric; + + /** + * The three-level (fully qualified) path to a unity catalog model. This model path serves to + * store the best model. + */ + @JsonProperty("register_to") + private String registerTo; + + /** + * Name of the column in the input training table used for custom data splits. The values in this + * column must be "train", "validate", or "test" to indicate which split each row belongs to. + */ + @JsonProperty("split_column") + private String splitColumn; + + /** + * Name of the column in the input training table that serves as the prediction target. The values + * in this column will be used as the ground truth for model training. 
+ */ + @JsonProperty("target_column") + private String targetColumn; + + /** Name of the column in the input training table that represents the timestamp of each row. */ + @JsonProperty("time_column") + private String timeColumn; + + /** + * Name of the column in the input training table used to group the dataset to predict individual + * time series + */ + @JsonProperty("timeseries_identifier_columns") + private Collection timeseriesIdentifierColumns; + + /** + * The three-level (fully qualified) name of a unity catalog table. This table serves as the + * training data for the forecasting model. + */ + @JsonProperty("train_data_path") + private String trainDataPath; + + /** + * The list of frameworks to include for model tuning. Possible values: 'Prophet', 'ARIMA', + * 'DeepAR'. An empty list will include all supported frameworks. + */ + @JsonProperty("training_frameworks") + private Collection trainingFrameworks; + + public CreateForecastingExperimentRequest setCustomWeightsColumn(String customWeightsColumn) { + this.customWeightsColumn = customWeightsColumn; + return this; + } + + public String getCustomWeightsColumn() { + return customWeightsColumn; + } + + public CreateForecastingExperimentRequest setDataGranularityQuantity( + Long dataGranularityQuantity) { + this.dataGranularityQuantity = dataGranularityQuantity; + return this; + } + + public Long getDataGranularityQuantity() { + return dataGranularityQuantity; + } + + public CreateForecastingExperimentRequest setDataGranularityUnit(String dataGranularityUnit) { + this.dataGranularityUnit = dataGranularityUnit; + return this; + } + + public String getDataGranularityUnit() { + return dataGranularityUnit; + } + + public CreateForecastingExperimentRequest setExperimentPath(String experimentPath) { + this.experimentPath = experimentPath; + return this; + } + + public String getExperimentPath() { + return experimentPath; + } + + public CreateForecastingExperimentRequest setForecastHorizon(Long forecastHorizon) { 
+ this.forecastHorizon = forecastHorizon; + return this; + } + + public Long getForecastHorizon() { + return forecastHorizon; + } + + public CreateForecastingExperimentRequest setHolidayRegions(Collection holidayRegions) { + this.holidayRegions = holidayRegions; + return this; + } + + public Collection getHolidayRegions() { + return holidayRegions; + } + + public CreateForecastingExperimentRequest setMaxRuntime(Long maxRuntime) { + this.maxRuntime = maxRuntime; + return this; + } + + public Long getMaxRuntime() { + return maxRuntime; + } + + public CreateForecastingExperimentRequest setPredictionDataPath(String predictionDataPath) { + this.predictionDataPath = predictionDataPath; + return this; + } + + public String getPredictionDataPath() { + return predictionDataPath; + } + + public CreateForecastingExperimentRequest setPrimaryMetric(String primaryMetric) { + this.primaryMetric = primaryMetric; + return this; + } + + public String getPrimaryMetric() { + return primaryMetric; + } + + public CreateForecastingExperimentRequest setRegisterTo(String registerTo) { + this.registerTo = registerTo; + return this; + } + + public String getRegisterTo() { + return registerTo; + } + + public CreateForecastingExperimentRequest setSplitColumn(String splitColumn) { + this.splitColumn = splitColumn; + return this; + } + + public String getSplitColumn() { + return splitColumn; + } + + public CreateForecastingExperimentRequest setTargetColumn(String targetColumn) { + this.targetColumn = targetColumn; + return this; + } + + public String getTargetColumn() { + return targetColumn; + } + + public CreateForecastingExperimentRequest setTimeColumn(String timeColumn) { + this.timeColumn = timeColumn; + return this; + } + + public String getTimeColumn() { + return timeColumn; + } + + public CreateForecastingExperimentRequest setTimeseriesIdentifierColumns( + Collection timeseriesIdentifierColumns) { + this.timeseriesIdentifierColumns = timeseriesIdentifierColumns; + return this; + } + + 
public Collection getTimeseriesIdentifierColumns() { + return timeseriesIdentifierColumns; + } + + public CreateForecastingExperimentRequest setTrainDataPath(String trainDataPath) { + this.trainDataPath = trainDataPath; + return this; + } + + public String getTrainDataPath() { + return trainDataPath; + } + + public CreateForecastingExperimentRequest setTrainingFrameworks( + Collection trainingFrameworks) { + this.trainingFrameworks = trainingFrameworks; + return this; + } + + public Collection getTrainingFrameworks() { + return trainingFrameworks; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateForecastingExperimentRequest that = (CreateForecastingExperimentRequest) o; + return Objects.equals(customWeightsColumn, that.customWeightsColumn) + && Objects.equals(dataGranularityQuantity, that.dataGranularityQuantity) + && Objects.equals(dataGranularityUnit, that.dataGranularityUnit) + && Objects.equals(experimentPath, that.experimentPath) + && Objects.equals(forecastHorizon, that.forecastHorizon) + && Objects.equals(holidayRegions, that.holidayRegions) + && Objects.equals(maxRuntime, that.maxRuntime) + && Objects.equals(predictionDataPath, that.predictionDataPath) + && Objects.equals(primaryMetric, that.primaryMetric) + && Objects.equals(registerTo, that.registerTo) + && Objects.equals(splitColumn, that.splitColumn) + && Objects.equals(targetColumn, that.targetColumn) + && Objects.equals(timeColumn, that.timeColumn) + && Objects.equals(timeseriesIdentifierColumns, that.timeseriesIdentifierColumns) + && Objects.equals(trainDataPath, that.trainDataPath) + && Objects.equals(trainingFrameworks, that.trainingFrameworks); + } + + @Override + public int hashCode() { + return Objects.hash( + customWeightsColumn, + dataGranularityQuantity, + dataGranularityUnit, + experimentPath, + forecastHorizon, + holidayRegions, + maxRuntime, + predictionDataPath, + primaryMetric, + 
registerTo, + splitColumn, + targetColumn, + timeColumn, + timeseriesIdentifierColumns, + trainDataPath, + trainingFrameworks); + } + + @Override + public String toString() { + return new ToStringer(CreateForecastingExperimentRequest.class) + .add("customWeightsColumn", customWeightsColumn) + .add("dataGranularityQuantity", dataGranularityQuantity) + .add("dataGranularityUnit", dataGranularityUnit) + .add("experimentPath", experimentPath) + .add("forecastHorizon", forecastHorizon) + .add("holidayRegions", holidayRegions) + .add("maxRuntime", maxRuntime) + .add("predictionDataPath", predictionDataPath) + .add("primaryMetric", primaryMetric) + .add("registerTo", registerTo) + .add("splitColumn", splitColumn) + .add("targetColumn", targetColumn) + .add("timeColumn", timeColumn) + .add("timeseriesIdentifierColumns", timeseriesIdentifierColumns) + .add("trainDataPath", trainDataPath) + .add("trainingFrameworks", trainingFrameworks) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java new file mode 100755 index 000000000..08dc1960f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateForecastingExperimentResponse { + /** The unique ID of the created forecasting experiment */ + @JsonProperty("experiment_id") + private String experimentId; + + public CreateForecastingExperimentResponse setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateForecastingExperimentResponse that = (CreateForecastingExperimentResponse) o; + return Objects.equals(experimentId, that.experimentId); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId); + } + + @Override + public String toString() { + return new ToStringer(CreateForecastingExperimentResponse.class) + .add("experimentId", experimentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRun.java index 4ee6ee0ae..f4bad4f58 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRun.java @@ -14,6 +14,10 @@ public class CreateRun { @JsonProperty("experiment_id") private String experimentId; + /** The name of the run. */ + @JsonProperty("run_name") + private String runName; + /** Unix timestamp in milliseconds of when the run started. 
*/ @JsonProperty("start_time") private Long startTime; @@ -38,6 +42,15 @@ public String getExperimentId() { return experimentId; } + public CreateRun setRunName(String runName) { + this.runName = runName; + return this; + } + + public String getRunName() { + return runName; + } + public CreateRun setStartTime(Long startTime) { this.startTime = startTime; return this; @@ -71,6 +84,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; CreateRun that = (CreateRun) o; return Objects.equals(experimentId, that.experimentId) + && Objects.equals(runName, that.runName) && Objects.equals(startTime, that.startTime) && Objects.equals(tags, that.tags) && Objects.equals(userId, that.userId); @@ -78,13 +92,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(experimentId, startTime, tags, userId); + return Objects.hash(experimentId, runName, startTime, tags, userId); } @Override public String toString() { return new ToStringer(CreateRun.class) .add("experimentId", experimentId) + .add("runName", runName) .add("startTime", startTime) .add("tags", tags) .add("userId", userId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Dataset.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Dataset.java index a1ba57af2..325ee15eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Dataset.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Dataset.java @@ -7,6 +7,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * Dataset. Represents a reference to data used for training, testing, or evaluation during the + * model development process. + */ @Generated public class Dataset { /** @@ -34,14 +38,14 @@ public class Dataset { @JsonProperty("schema") private String schema; - /** The type of the dataset source, e.g. ‘databricks-uc-table’, ‘DBFS’, ‘S3’, ... 
*/ - @JsonProperty("source") - private String source; - /** * Source information for the dataset. Note that the source may not exactly reproduce the dataset * if it was transformed / modified before use with MLflow. */ + @JsonProperty("source") + private String source; + + /** The type of the dataset source, e.g. ‘databricks-uc-table’, ‘DBFS’, ‘S3’, ... */ @JsonProperty("source_type") private String sourceType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInput.java index 24740b8ac..dfcb8963c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInput.java @@ -8,6 +8,7 @@ import java.util.Collection; import java.util.Objects; +/** DatasetInput. Represents a dataset and input tags. */ @Generated public class DatasetInput { /** The dataset being used as a Run input. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Experiment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Experiment.java index 2b6cb8efb..b23e8994f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Experiment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Experiment.java @@ -8,6 +8,7 @@ import java.util.Collection; import java.util.Objects; +/** An experiment and its metadata. */ @Generated public class Experiment { /** Location where artifacts for the experiment are stored. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTag.java index b3703c226..125870ff3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTag.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** A tag for an experiment. */ @Generated public class ExperimentTag { /** The tag key. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index 8c4c27e8c..65d6f355d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -43,7 +43,7 @@ public CreateExperimentResponse createExperiment(String name) { * that another experiment with the same name does not already exist and fails if another * experiment with the same name already exists. * - *

Throws `RESOURCE_ALREADY_EXISTS` if a experiment with the given name exists. + *

Throws `RESOURCE_ALREADY_EXISTS` if an experiment with the given name exists. */ public CreateExperimentResponse createExperiment(CreateExperiment request) { return impl.createExperiment(request); @@ -53,7 +53,7 @@ public CreateExperimentResponse createExperiment(CreateExperiment request) { * Create a run. * *

Creates a new run within an experiment. A run is usually a single execution of a machine - * learning or data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric` and + * learning or data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric`, and * `mlflowRunTag` associated with a single execution. */ public CreateRunResponse createRun(CreateRun request) { @@ -68,7 +68,7 @@ public void deleteExperiment(String experimentId) { * Delete an experiment. * *

Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. - * If the experiment uses FileStore, artifacts associated with experiment are also deleted. + * If the experiment uses FileStore, artifacts associated with the experiment are also deleted. */ public void deleteExperiment(DeleteExperiment request) { impl.deleteExperiment(request); @@ -98,7 +98,6 @@ public DeleteRunsResponse deleteRuns(String experimentId, long maxTimestampMilli *

Bulk delete runs in an experiment that were created prior to or at the specified timestamp. * Deletes at most max_runs per request. To call this API from a Databricks Notebook in Python, * you can use the client code snippet on - * https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-delete. */ public DeleteRunsResponse deleteRuns(DeleteRuns request) { return impl.deleteRuns(request); @@ -109,7 +108,7 @@ public void deleteTag(String runId, String key) { } /** - * Delete a tag. + * Delete a tag on a run. * *

Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a * run completes. @@ -118,12 +117,12 @@ public void deleteTag(DeleteTag request) { impl.deleteTag(request); } - public GetExperimentResponse getByName(String experimentName) { + public GetExperimentByNameResponse getByName(String experimentName) { return getByName(new GetByNameRequest().setExperimentName(experimentName)); } /** - * Get metadata. + * Get an experiment by name. * *

Gets metadata for an experiment. * @@ -133,7 +132,7 @@ public GetExperimentResponse getByName(String experimentName) { * *

Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists. */ - public GetExperimentResponse getByName(GetByNameRequest request) { + public GetExperimentByNameResponse getByName(GetByNameRequest request) { return impl.getByName(request); } @@ -155,7 +154,7 @@ public Iterable getHistory(String metricKey) { } /** - * Get history of a given metric within a run. + * Get metric history for a run. * *

Gets a list of all values for the specified metric for a given run. */ @@ -219,13 +218,12 @@ public GetRunResponse getRun(GetRunRequest request) { } /** - * Get all artifacts. + * List artifacts. * - *

List artifacts for a run. Takes an optional `artifact_path` prefix. If it is specified, the - * response contains only artifacts with the specified prefix. This API does not support - * pagination when listing artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved - * for UC Volumes. Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in - * UC Volumes, which supports pagination. See [List directory contents | Files + *

List artifacts for a run. Takes an optional `artifact_path` prefix; if specified, the + * response contains only artifacts with the specified prefix. A maximum of 1000 artifacts will be + * retrieved for UC Volumes. Please call `/api/2.0/fs/directories{directory_path}` for listing + * artifacts in UC Volumes, which supports pagination. See [List directory contents | Files * API](/api/workspace/files/listdirectorycontents). */ public Iterable listArtifacts(ListArtifactsRequest request) { @@ -262,7 +260,7 @@ public Iterable listExperiments(ListExperimentsRequest request) { } /** - * Log a batch. + * Log a batch of metrics/params/tags for a run. * *

Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the * server will respond with an error (non-200 status code). @@ -290,26 +288,38 @@ public Iterable listExperiments(ListExperimentsRequest request) { *

Request Limits ------------------------------- A single JSON-serialized API request may be * up to 1 MB in size and contain: * - *

* No more than 1000 metrics, params, and tags in total * Up to 1000 metrics * Up to 100 - * params * Up to 100 tags + *

* No more than 1000 metrics, params, and tags in total + * + *

* Up to 1000 metrics + * + *

* Up to 100 params + * + *

* Up to 100 tags * *

For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging * 900 metrics, 50 params, and 51 tags is invalid. * *

The following limits also apply to metric, param, and tag keys and values: * - *

* Metric keys, param keys, and tag keys can be up to 250 characters in length * Parameter - * and tag values can be up to 250 characters in length + *

* Metric keys, param keys, and tag keys can be up to 250 characters in length + * + *

* Parameter and tag values can be up to 250 characters in length */ public void logBatch(LogBatch request) { impl.logBatch(request); } + public void logInputs(String runId) { + logInputs(new LogInputs().setRunId(runId)); + } + /** * Log inputs to a run. * *

**NOTE:** Experimental: This API may change or be removed in a future release without * warning. + * + *

Logs inputs, such as datasets and models, to an MLflow Run. */ public void logInputs(LogInputs request) { impl.logInputs(request); @@ -320,9 +330,9 @@ public void logMetric(String key, double value, long timestamp) { } /** - * Log a metric. + * Log a metric for a run. * - *

Logs a metric for a run. A metric is a key-value pair (string key, float value) with an + *

Log a metric for a run. A metric is a key-value pair (string key, float value) with an * associated timestamp. Examples include the various metrics that represent ML model accuracy. A * metric can be logged multiple times. */ @@ -345,7 +355,7 @@ public void logParam(String key, String value) { } /** - * Log a param. + * Log a param for a run. * *

Logs a param used for a run. A param is a key-value pair (string key, string value). * Examples include hyperparameters used for ML model training and constant dates and values used @@ -360,7 +370,7 @@ public void restoreExperiment(String experimentId) { } /** - * Restores an experiment. + * Restore an experiment. * *

Restore an experiment marked for deletion. This also restores associated metadata, runs, * metrics, params, and tags. If experiment uses FileStore, underlying artifacts associated with @@ -379,7 +389,10 @@ public void restoreRun(String runId) { /** * Restore a run. * - *

Restores a deleted run. + *

Restores a deleted run. This also restores associated metadata, runs, metrics, params, and + * tags. + * + *

Throws `RESOURCE_DOES_NOT_EXIST` if the run was never created or was permanently deleted. */ public void restoreRun(RestoreRun request) { impl.restoreRun(request); @@ -396,7 +409,6 @@ public RestoreRunsResponse restoreRuns(String experimentId, long minTimestampMil *

Bulk restore runs in an experiment that were deleted no earlier than the specified * timestamp. Restores at most max_runs per request. To call this API from a Databricks Notebook * in Python, you can use the client code snippet on - * https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-restore. */ public RestoreRunsResponse restoreRuns(RestoreRuns request) { return impl.restoreRuns(request); @@ -426,7 +438,7 @@ public Iterable searchExperiments(SearchExperiments request) { * *

Searches for runs that satisfy expressions. * - *

Search expressions can use `mlflowMetric` and `mlflowParam` keys.", + *

Search expressions can use `mlflowMetric` and `mlflowParam` keys. */ public Iterable searchRuns(SearchRuns request) { return new Paginator<>( @@ -448,7 +460,7 @@ public void setExperimentTag(String experimentId, String key, String value) { } /** - * Set a tag. + * Set a tag for an experiment. * *

Sets a tag on an experiment. Experiment tags are metadata that can be updated. */ @@ -476,7 +488,7 @@ public void setTag(String key, String value) { } /** - * Set a tag. + * Set a tag for a run. * *

Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run * completes. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java index d6c119d10..d8a23c091 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java @@ -101,13 +101,13 @@ public void deleteTag(DeleteTag request) { } @Override - public GetExperimentResponse getByName(GetByNameRequest request) { + public GetExperimentByNameResponse getByName(GetByNameRequest request) { String path = "/api/2.0/mlflow/experiments/get-by-name"; try { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, GetExperimentResponse.class); + return apiClient.execute(req, GetExperimentByNameResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index 323c848c4..43efb3908 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -25,7 +25,7 @@ public interface ExperimentsService { * that another experiment with the same name does not already exist and fails if another * experiment with the same name already exists. * - *

Throws `RESOURCE_ALREADY_EXISTS` if a experiment with the given name exists. + *

Throws `RESOURCE_ALREADY_EXISTS` if an experiment with the given name exists. */ CreateExperimentResponse createExperiment(CreateExperiment createExperiment); @@ -33,7 +33,7 @@ public interface ExperimentsService { * Create a run. * *

Creates a new run within an experiment. A run is usually a single execution of a machine - * learning or data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric` and + * learning or data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric`, and * `mlflowRunTag` associated with a single execution. */ CreateRunResponse createRun(CreateRun createRun); @@ -42,7 +42,7 @@ public interface ExperimentsService { * Delete an experiment. * *

Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. - * If the experiment uses FileStore, artifacts associated with experiment are also deleted. + * If the experiment uses FileStore, artifacts associated with the experiment are also deleted. */ void deleteExperiment(DeleteExperiment deleteExperiment); @@ -59,12 +59,11 @@ public interface ExperimentsService { *

Bulk delete runs in an experiment that were created prior to or at the specified timestamp. * Deletes at most max_runs per request. To call this API from a Databricks Notebook in Python, * you can use the client code snippet on - * https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-delete. */ DeleteRunsResponse deleteRuns(DeleteRuns deleteRuns); /** - * Delete a tag. + * Delete a tag on a run. * *

Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a * run completes. @@ -72,7 +71,7 @@ public interface ExperimentsService { void deleteTag(DeleteTag deleteTag); /** - * Get metadata. + * Get an experiment by name. * *

Gets metadata for an experiment. * @@ -82,7 +81,7 @@ public interface ExperimentsService { * *

Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists. */ - GetExperimentResponse getByName(GetByNameRequest getByNameRequest); + GetExperimentByNameResponse getByName(GetByNameRequest getByNameRequest); /** * Get an experiment. @@ -92,7 +91,7 @@ public interface ExperimentsService { GetExperimentResponse getExperiment(GetExperimentRequest getExperimentRequest); /** - * Get history of a given metric within a run. + * Get metric history for a run. * *

Gets a list of all values for the specified metric for a given run. */ @@ -126,13 +125,12 @@ ExperimentPermissions getPermissions( GetRunResponse getRun(GetRunRequest getRunRequest); /** - * Get all artifacts. + * List artifacts. * - *

List artifacts for a run. Takes an optional `artifact_path` prefix. If it is specified, the - * response contains only artifacts with the specified prefix. This API does not support - * pagination when listing artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved - * for UC Volumes. Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in - * UC Volumes, which supports pagination. See [List directory contents | Files + *

List artifacts for a run. Takes an optional `artifact_path` prefix; if specified, the + * response contains only artifacts with the specified prefix. A maximum of 1000 artifacts will be + * retrieved for UC Volumes. Please call `/api/2.0/fs/directories{directory_path}` for listing + * artifacts in UC Volumes, which supports pagination. See [List directory contents | Files + * API](/api/workspace/files/listdirectorycontents). */ ListArtifactsResponse listArtifacts(ListArtifactsRequest listArtifactsRequest); @@ -145,7 +143,7 @@ ExperimentPermissions getPermissions( ListExperimentsResponse listExperiments(ListExperimentsRequest listExperimentsRequest); /** - * Log a batch. + * Log a batch of metrics/params/tags for a run. * *

Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the * server will respond with an error (non-200 status code). @@ -173,16 +171,22 @@ ExperimentPermissions getPermissions( *

Request Limits ------------------------------- A single JSON-serialized API request may be * up to 1 MB in size and contain: * - *

* No more than 1000 metrics, params, and tags in total * Up to 1000 metrics * Up to 100 - * params * Up to 100 tags + *

* No more than 1000 metrics, params, and tags in total + * + *

* Up to 1000 metrics + * + *

* Up to 100 params + * + *

* Up to 100 tags * *

For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging * 900 metrics, 50 params, and 51 tags is invalid. * *

The following limits also apply to metric, param, and tag keys and values: * - *

* Metric keys, param keys, and tag keys can be up to 250 characters in length * Parameter - * and tag values can be up to 250 characters in length + *

* Metric keys, param keys, and tag keys can be up to 250 characters in length + * + *

* Parameter and tag values can be up to 250 characters in length */ void logBatch(LogBatch logBatch); @@ -191,13 +195,15 @@ ExperimentPermissions getPermissions( * *

**NOTE:** Experimental: This API may change or be removed in a future release without * warning. + * + *

Logs inputs, such as datasets and models, to an MLflow Run. */ void logInputs(LogInputs logInputs); /** - * Log a metric. + * Log a metric for a run. * - *

Logs a metric for a run. A metric is a key-value pair (string key, float value) with an + *

Logs a metric for a run. A metric is a key-value pair (string key, float value) with an * associated timestamp. Examples include the various metrics that represent ML model accuracy. A * metric can be logged multiple times. */ @@ -212,7 +218,7 @@ ExperimentPermissions getPermissions( void logModel(LogModel logModel); /** - * Log a param. + * Log a param for a run. * *

Logs a param used for a run. A param is a key-value pair (string key, string value). * Examples include hyperparameters used for ML model training and constant dates and values used @@ -221,7 +227,7 @@ ExperimentPermissions getPermissions( void logParam(LogParam logParam); /** - * Restores an experiment. + * Restore an experiment. * *

Restore an experiment marked for deletion. This also restores associated metadata, runs, * metrics, params, and tags. If experiment uses FileStore, underlying artifacts associated with @@ -234,7 +240,10 @@ ExperimentPermissions getPermissions( /** * Restore a run. * - *

Restores a deleted run. + *

Restores a deleted run. This also restores associated metadata, metrics, params, and + * tags. + + *

Throws `RESOURCE_DOES_NOT_EXIST` if the run was never created or was permanently deleted. */ void restoreRun(RestoreRun restoreRun); @@ -244,7 +253,6 @@ ExperimentPermissions getPermissions( *

Bulk restore runs in an experiment that were deleted no earlier than the specified * timestamp. Restores at most max_runs per request. To call this API from a Databricks Notebook * in Python, you can use the client code snippet on - * https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-restore. */ RestoreRunsResponse restoreRuns(RestoreRuns restoreRuns); @@ -260,12 +268,12 @@ ExperimentPermissions getPermissions( * *

Searches for runs that satisfy expressions. * - *

Search expressions can use `mlflowMetric` and `mlflowParam` keys.", + *

Search expressions can use `mlflowMetric` and `mlflowParam` keys. */ SearchRunsResponse searchRuns(SearchRuns searchRuns); /** - * Set a tag. + * Set a tag for an experiment. * *

Sets a tag on an experiment. Experiment tags are metadata that can be updated. */ @@ -281,7 +289,7 @@ ExperimentPermissions getPermissions( ExperimentPermissions setPermissions(ExperimentPermissionsRequest experimentPermissionsRequest); /** - * Set a tag. + * Set a tag for a run. * *

Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run * completes. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java index b719b15e7..03278d8e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Metadata of a single artifact file or directory. */ @Generated public class FileInfo { /** Size in bytes. Unset for directories. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java new file mode 100755 index 000000000..e080315b6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java @@ -0,0 +1,126 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.ml; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Wait; +import java.time.Duration; +import java.util.Arrays; +import java.util.concurrent.TimeoutException; +import java.util.function.Consumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** The Forecasting API allows you to create and get serverless forecasting experiments */ +@Generated +public class ForecastingAPI { + private static final Logger LOG = LoggerFactory.getLogger(ForecastingAPI.class); + + private final ForecastingService impl; + + /** Regular-use constructor */ + public ForecastingAPI(ApiClient apiClient) { + impl = new ForecastingImpl(apiClient); + } + + /** Constructor for mocks */ + public ForecastingAPI(ForecastingService mock) { + impl = mock; + } + + public ForecastingExperiment waitGetExperimentForecastingSucceeded(String experimentId) + throws TimeoutException { + return waitGetExperimentForecastingSucceeded(experimentId, Duration.ofMinutes(120), null); + } + + public ForecastingExperiment waitGetExperimentForecastingSucceeded( + String experimentId, Duration timeout, Consumer callback) + throws TimeoutException { + long deadline = System.currentTimeMillis() + timeout.toMillis(); + java.util.List targetStates = + Arrays.asList(ForecastingExperimentState.SUCCEEDED); + java.util.List failureStates = + Arrays.asList(ForecastingExperimentState.FAILED, ForecastingExperimentState.CANCELLED); + String statusMessage = "polling..."; + int attempt = 1; + while (System.currentTimeMillis() < deadline) { + ForecastingExperiment poll = + getExperiment(new GetForecastingExperimentRequest().setExperimentId(experimentId)); + ForecastingExperimentState status = poll.getState(); + statusMessage = String.format("current status: %s", status); + if (targetStates.contains(status)) { + return poll; + } + if (callback != null) { + 
callback.accept(poll); + } + if (failureStates.contains(status)) { + String msg = String.format("failed to reach SUCCEEDED, got %s: %s", status, statusMessage); + throw new IllegalStateException(msg); + } + + String prefix = String.format("experimentId=%s", experimentId); + int sleep = attempt; + if (sleep > 10) { + // sleep 10s max per attempt + sleep = 10; + } + LOG.info("{}: ({}) {} (sleeping ~{}s)", prefix, status, statusMessage, sleep); + try { + Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new DatabricksException("Current thread was interrupted", e); + } + attempt++; + } + throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); + } + + public Wait createExperiment( + String trainDataPath, + String targetColumn, + String timeColumn, + String dataGranularityUnit, + long forecastHorizon) { + return createExperiment( + new CreateForecastingExperimentRequest() + .setTrainDataPath(trainDataPath) + .setTargetColumn(targetColumn) + .setTimeColumn(timeColumn) + .setDataGranularityUnit(dataGranularityUnit) + .setForecastHorizon(forecastHorizon)); + } + + /** + * Create a forecasting experiment. + * + *

Creates a serverless forecasting experiment. Returns the experiment ID. + */ + public Wait createExperiment( + CreateForecastingExperimentRequest request) { + CreateForecastingExperimentResponse response = impl.createExperiment(request); + return new Wait<>( + (timeout, callback) -> + waitGetExperimentForecastingSucceeded(response.getExperimentId(), timeout, callback), + response); + } + + public ForecastingExperiment getExperiment(String experimentId) { + return getExperiment(new GetForecastingExperimentRequest().setExperimentId(experimentId)); + } + + /** + * Get a forecasting experiment. + * + *

Public RPC to get forecasting experiment + */ + public ForecastingExperiment getExperiment(GetForecastingExperimentRequest request) { + return impl.getExperiment(request); + } + + public ForecastingService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperiment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperiment.java new file mode 100755 index 000000000..02d5fd61b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperiment.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents a forecasting experiment with its unique identifier, URL, and state. */ +@Generated +public class ForecastingExperiment { + /** The unique ID for the forecasting experiment. */ + @JsonProperty("experiment_id") + private String experimentId; + + /** The URL to the forecasting experiment page. */ + @JsonProperty("experiment_page_url") + private String experimentPageUrl; + + /** The current state of the forecasting experiment. 
*/ + @JsonProperty("state") + private ForecastingExperimentState state; + + public ForecastingExperiment setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public ForecastingExperiment setExperimentPageUrl(String experimentPageUrl) { + this.experimentPageUrl = experimentPageUrl; + return this; + } + + public String getExperimentPageUrl() { + return experimentPageUrl; + } + + public ForecastingExperiment setState(ForecastingExperimentState state) { + this.state = state; + return this; + } + + public ForecastingExperimentState getState() { + return state; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ForecastingExperiment that = (ForecastingExperiment) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(experimentPageUrl, that.experimentPageUrl) + && Objects.equals(state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, experimentPageUrl, state); + } + + @Override + public String toString() { + return new ToStringer(ForecastingExperiment.class) + .add("experimentId", experimentId) + .add("experimentPageUrl", experimentPageUrl) + .add("state", state) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperimentState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperimentState.java new file mode 100755 index 000000000..48ada8060 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperimentState.java @@ -0,0 +1,14 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum ForecastingExperimentState { + CANCELLED, + FAILED, + PENDING, + RUNNING, + SUCCEEDED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java new file mode 100755 index 000000000..686aad0f4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of Forecasting */ +@Generated +class ForecastingImpl implements ForecastingService { + private final ApiClient apiClient; + + public ForecastingImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CreateForecastingExperimentResponse createExperiment( + CreateForecastingExperimentRequest request) { + String path = "/api/2.0/automl/create-forecasting-experiment"; + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateForecastingExperimentResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ForecastingExperiment getExperiment(GetForecastingExperimentRequest request) { + String path = + String.format("/api/2.0/automl/get-forecasting-experiment/%s", request.getExperimentId()); + try { + Request req = new Request("GET", path); + 
ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ForecastingExperiment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingService.java new file mode 100755 index 000000000..f525514e2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingService.java @@ -0,0 +1,30 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; + +/** + * The Forecasting API allows you to create and get serverless forecasting experiments + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ForecastingService { + /** + * Create a forecasting experiment. + * + *

Creates a serverless forecasting experiment. Returns the experiment ID. + */ + CreateForecastingExperimentResponse createExperiment( + CreateForecastingExperimentRequest createForecastingExperimentRequest); + + /** + * Get a forecasting experiment. + * + *

Public RPC to get forecasting experiment + */ + ForecastingExperiment getExperiment( + GetForecastingExperimentRequest getForecastingExperimentRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java index 7d1376a08..b3e7b7da2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get metadata */ +/** Get an experiment by name */ @Generated public class GetByNameRequest { /** Name of the associated experiment. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java new file mode 100755 index 000000000..c8c778488 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetExperimentByNameResponse { + /** Experiment details. 
*/ + @JsonProperty("experiment") + private Experiment experiment; + + public GetExperimentByNameResponse setExperiment(Experiment experiment) { + this.experiment = experiment; + return this; + } + + public Experiment getExperiment() { + return experiment; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExperimentByNameResponse that = (GetExperimentByNameResponse) o; + return Objects.equals(experiment, that.experiment); + } + + @Override + public int hashCode() { + return Objects.hash(experiment); + } + + @Override + public String toString() { + return new ToStringer(GetExperimentByNameResponse.class) + .add("experiment", experiment) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequest.java new file mode 100755 index 000000000..bf12ad25a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a forecasting experiment */ +@Generated +public class GetForecastingExperimentRequest { + /** The unique ID of a forecasting experiment */ + @JsonIgnore private String experimentId; + + public GetForecastingExperimentRequest setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetForecastingExperimentRequest that = (GetForecastingExperimentRequest) o; + return Objects.equals(experimentId, that.experimentId); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId); + } + + @Override + public String toString() { + return new ToStringer(GetForecastingExperimentRequest.class) + .add("experimentId", experimentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java index f2041e440..60f597c20 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get history of a given metric within a run */ +/** Get metric history for a run */ @Generated public class GetHistoryRequest { /** @@ -35,7 +35,7 @@ public class GetHistoryRequest { private String runId; /** - * [Deprecated, use run_id instead] ID of the run from which to fetch metric values. 
This field + * [Deprecated, use `run_id` instead] ID of the run from which to fetch metric values. This field * will be removed in a future MLflow version. */ @JsonIgnore diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java index 54a957017..22c1f2388 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java @@ -10,11 +10,18 @@ @Generated public class GetMetricHistoryResponse { - /** All logged values for this metric. */ + /** + * All logged values for this metric if `max_results` is not specified in the request or if the + * total count of metrics returned is less than the service level pagination threshold. Otherwise, + * this is one page of results. + */ @JsonProperty("metrics") private Collection metrics; - /** Token that can be used to retrieve the next page of metric history results */ + /** + * A token that can be used to issue a query for the next page of metric history values. A missing + * token indicates that no additional metrics are available to fetch. + */ @JsonProperty("next_page_token") private String nextPageToken; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunRequest.java index a268d2ef0..3206a8d1a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunRequest.java @@ -17,8 +17,8 @@ public class GetRunRequest { private String runId; /** - * [Deprecated, use run_id instead] ID of the run to fetch. This field will be removed in a future - * MLflow version. + * [Deprecated, use `run_id` instead] ID of the run to fetch. 
This field will be removed in a + * future MLflow version. */ @JsonIgnore @QueryParam("run_uuid") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTag.java index 79722357b..1ccaceeae 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTag.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Tag for a dataset input. */ @Generated public class InputTag { /** The tag key. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java index 6c7f11c6b..d7a68b8ed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get all artifacts */ +/** List artifacts */ @Generated public class ListArtifactsRequest { /** @@ -33,7 +33,7 @@ public class ListArtifactsRequest { private String runId; /** - * [Deprecated, use run_id instead] ID of the run whose artifacts to list. This field will be + * [Deprecated, use `run_id` instead] ID of the run whose artifacts to list. This field will be * removed in a future MLflow version. 
*/ @JsonIgnore diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java index 7a750cbe1..f3b09e157 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java @@ -32,7 +32,7 @@ public class ListExperimentsRequest { */ @JsonIgnore @QueryParam("view_type") - private String viewType; + private ViewType viewType; public ListExperimentsRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; @@ -52,12 +52,12 @@ public String getPageToken() { return pageToken; } - public ListExperimentsRequest setViewType(String viewType) { + public ListExperimentsRequest setViewType(ViewType viewType) { this.viewType = viewType; return this; } - public String getViewType() { + public ViewType getViewType() { return viewType; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java index a36cbd409..5182201be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java @@ -18,7 +18,7 @@ public class LogMetric { private String runId; /** - * [Deprecated, use run_id instead] ID of the run under which to log the metric. This field will + * [Deprecated, use `run_id` instead] ID of the run under which to log the metric. This field will * be removed in a future MLflow version. 
*/ @JsonProperty("run_uuid") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParam.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParam.java index 723980707..3ea5a20ed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParam.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParam.java @@ -18,8 +18,8 @@ public class LogParam { private String runId; /** - * [Deprecated, use run_id instead] ID of the run under which to log the param. This field will be - * removed in a future MLflow version. + * [Deprecated, use `run_id` instead] ID of the run under which to log the param. This field will + * be removed in a future MLflow version. */ @JsonProperty("run_uuid") private String runUuid; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java index 0ee95106e..e4bcb3557 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Metric associated with a run, represented as a key-value pair. */ @Generated public class Metric { /** Key identifying this metric. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Param.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Param.java index c7f06a29c..9f8444d9d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Param.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Param.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Param associated with a run. */ @Generated public class Param { /** Key identifying this param. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Run.java index ac2c3ba41..f914c7738 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Run.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Run.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** A single run. */ @Generated public class Run { /** Run data. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunData.java index f30f3178c..b7aa224f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunData.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunData.java @@ -8,6 +8,7 @@ import java.util.Collection; import java.util.Objects; +/** Run data (metrics, params, and tags). */ @Generated public class RunData { /** Run metrics. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfo.java index 2d53b133b..ee4b2f388 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfo.java @@ -7,12 +7,13 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Metadata of a single run. */ @Generated public class RunInfo { /** * URI of the directory where artifacts should be uploaded. This can be a local path (starting - * with "/"), or a distributed file system (DFS) path, like `s3://bucket/directory` or - * `dbfs:/my/directory`. If not set, the local `./mlruns` directory is chosen. + * with "/"), or a distributed file system (DFS) path, like ``s3://bucket/directory`` or + * ``dbfs:/my/directory``. 
If not set, the local ``./mlruns`` directory is chosen. */ @JsonProperty("artifact_uri") private String artifactUri; @@ -33,6 +34,10 @@ public class RunInfo { @JsonProperty("run_id") private String runId; + /** The name of the run. */ + @JsonProperty("run_name") + private String runName; + /** * [Deprecated, use run_id instead] Unique identifier for the run. This field will be removed in a * future MLflow version. @@ -100,6 +105,15 @@ public String getRunId() { return runId; } + public RunInfo setRunName(String runName) { + this.runName = runName; + return this; + } + + public String getRunName() { + return runName; + } + public RunInfo setRunUuid(String runUuid) { this.runUuid = runUuid; return this; @@ -146,6 +160,7 @@ public boolean equals(Object o) { && Objects.equals(experimentId, that.experimentId) && Objects.equals(lifecycleStage, that.lifecycleStage) && Objects.equals(runId, that.runId) + && Objects.equals(runName, that.runName) && Objects.equals(runUuid, that.runUuid) && Objects.equals(startTime, that.startTime) && Objects.equals(status, that.status) @@ -160,6 +175,7 @@ public int hashCode() { experimentId, lifecycleStage, runId, + runName, runUuid, startTime, status, @@ -174,6 +190,7 @@ public String toString() { .add("experimentId", experimentId) .add("lifecycleStage", lifecycleStage) .add("runId", runId) + .add("runName", runName) .add("runUuid", runUuid) .add("startTime", startTime) .add("status", status) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfoStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfoStatus.java index f8039ae65..300badee4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfoStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfoStatus.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Current status of the run. */ +/** Status of a run. 
*/ @Generated public enum RunInfoStatus { FAILED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java index 681f340f4..b90b79df7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java @@ -8,6 +8,7 @@ import java.util.Collection; import java.util.Objects; +/** Run inputs. */ @Generated public class RunInputs { /** Run metrics. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java index 224473d7f..5e029636c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Tag for a run. */ @Generated public class RunTag { /** The tag key. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java index b98312a18..e1cc5aaec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java @@ -35,7 +35,7 @@ public class SearchExperiments { * experiments. 
*/ @JsonProperty("view_type") - private SearchExperimentsViewType viewType; + private ViewType viewType; public SearchExperiments setFilter(String filter) { this.filter = filter; @@ -73,12 +73,12 @@ public String getPageToken() { return pageToken; } - public SearchExperiments setViewType(SearchExperimentsViewType viewType) { + public SearchExperiments setViewType(ViewType viewType) { this.viewType = viewType; return this; } - public SearchExperimentsViewType getViewType() { + public ViewType getViewType() { return viewType; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsViewType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsViewType.java deleted file mode 100755 index 70c2747b8..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsViewType.java +++ /dev/null @@ -1,15 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; - -/** - * Qualifier for type of experiments to be returned. If unspecified, return only active experiments. - */ -@Generated -public enum SearchExperimentsViewType { - ACTIVE_ONLY, - ALL, - DELETED_ONLY, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java index 8cd4c14e0..89882d591 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java @@ -35,10 +35,10 @@ public class SearchRuns { /** * List of columns to be ordered by, including attributes, params, metrics, and tags with an - * optional "DESC" or "ASC" annotation, where "ASC" is the default. 
Example: ["params.input DESC", - * "metrics.alpha ASC", "metrics.rmse"] Tiebreaks are done by start_time DESC followed by run_id - * for runs with the same start time (and this is the default ordering criterion if order_by is - * not provided). + * optional `"DESC"` or `"ASC"` annotation, where `"ASC"` is the default. Example: `["params.input + * DESC", "metrics.alpha ASC", "metrics.rmse"]`. Tiebreaks are done by start_time `DESC` followed + * by `run_id` for runs with the same start time (and this is the default ordering criterion if + * order_by is not provided). */ @JsonProperty("order_by") private Collection orderBy; @@ -49,7 +49,7 @@ public class SearchRuns { /** Whether to display only active, only deleted, or all runs. Defaults to only active runs. */ @JsonProperty("run_view_type") - private SearchRunsRunViewType runViewType; + private ViewType runViewType; public SearchRuns setExperimentIds(Collection experimentIds) { this.experimentIds = experimentIds; @@ -96,12 +96,12 @@ public String getPageToken() { return pageToken; } - public SearchRuns setRunViewType(SearchRunsRunViewType runViewType) { + public SearchRuns setRunViewType(ViewType runViewType) { this.runViewType = runViewType; return this; } - public SearchRunsRunViewType getRunViewType() { + public ViewType getRunViewType() { return runViewType; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java index fedbed02a..4f4189737 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java @@ -13,17 +13,11 @@ public class SetExperimentTag { @JsonProperty("experiment_id") private String experimentId; - /** - * Name of the tag. Maximum size depends on storage backend. 
All storage backends are guaranteed - * to support key values up to 250 bytes in size. - */ + /** Name of the tag. Keys up to 250 bytes in size are supported. */ @JsonProperty("key") private String key; - /** - * String value of the tag being logged. Maximum size depends on storage backend. All storage - * backends are guaranteed to support key values up to 5000 bytes in size. - */ + /** String value of the tag being logged. Values up to 64KB in size are supported. */ @JsonProperty("value") private String value; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTag.java index c8231b813..71795835f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTag.java @@ -9,10 +9,7 @@ @Generated public class SetTag { - /** - * Name of the tag. Maximum size depends on storage backend. All storage backends are guaranteed - * to support key values up to 250 bytes in size. - */ + /** Name of the tag. Keys up to 250 bytes in size are supported. */ @JsonProperty("key") private String key; @@ -21,16 +18,13 @@ public class SetTag { private String runId; /** - * [Deprecated, use run_id instead] ID of the run under which to log the tag. This field will be + * [Deprecated, use `run_id` instead] ID of the run under which to log the tag. This field will be * removed in a future MLflow version. */ @JsonProperty("run_uuid") private String runUuid; - /** - * String value of the tag being logged. Maximum size depends on storage backend. All storage - * backends are guaranteed to support key values up to 5000 bytes in size. - */ + /** String value of the tag being logged. Values up to 64KB in size are supported. 
*/ @JsonProperty("value") private String value; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRun.java index 09b8020be..738e2d982 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRun.java @@ -17,8 +17,12 @@ public class UpdateRun { @JsonProperty("run_id") private String runId; + /** Updated name of the run. */ + @JsonProperty("run_name") + private String runName; + /** - * [Deprecated, use run_id instead] ID of the run to update.. This field will be removed in a + * [Deprecated, use `run_id` instead] ID of the run to update. This field will be removed in a * future MLflow version. */ @JsonProperty("run_uuid") @@ -46,6 +50,15 @@ public String getRunId() { return runId; } + public UpdateRun setRunName(String runName) { + this.runName = runName; + return this; + } + + public String getRunName() { + return runName; + } + public UpdateRun setRunUuid(String runUuid) { this.runUuid = runUuid; return this; @@ -71,13 +84,14 @@ public boolean equals(Object o) { UpdateRun that = (UpdateRun) o; return Objects.equals(endTime, that.endTime) && Objects.equals(runId, that.runId) + && Objects.equals(runName, that.runName) && Objects.equals(runUuid, that.runUuid) && Objects.equals(status, that.status); } @Override public int hashCode() { - return Objects.hash(endTime, runId, runUuid, status); + return Objects.hash(endTime, runId, runName, runUuid, status); } @Override @@ -85,6 +99,7 @@ public String toString() { return new ToStringer(UpdateRun.class) .add("endTime", endTime) .add("runId", runId) + .add("runName", runName) .add("runUuid", runUuid) .add("status", status) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunStatus.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunStatus.java index 84252ecbc..bc219452d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunStatus.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Updated status of the run. */ +/** Status of a run. */ @Generated public enum UpdateRunStatus { FAILED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsRunViewType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ViewType.java similarity index 62% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsRunViewType.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ViewType.java index 1eed86324..cc2255771 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsRunViewType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ViewType.java @@ -4,9 +4,9 @@ import com.databricks.sdk.support.Generated; -/** Whether to display only active, only deleted, or all runs. Defaults to only active runs. */ +/** Qualifier for the view type. 
*/ @Generated -public enum SearchRunsRunViewType { +public enum ViewType { ACTIVE_ONLY, ALL, DELETED_ONLY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretRequest.java index 66990d88c..ea5a42d69 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretRequest.java @@ -5,14 +5,30 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Create service principal secret */ @Generated public class CreateServicePrincipalSecretRequest { + /** + * The lifetime of the secret in seconds. If this parameter is not provided, the secret will have + * a default lifetime of 730 days (63072000s). + */ + @JsonProperty("lifetime") + private String lifetime; + /** The service principal ID. 
*/ @JsonIgnore private Long servicePrincipalId; + public CreateServicePrincipalSecretRequest setLifetime(String lifetime) { + this.lifetime = lifetime; + return this; + } + + public String getLifetime() { + return lifetime; + } + public CreateServicePrincipalSecretRequest setServicePrincipalId(Long servicePrincipalId) { this.servicePrincipalId = servicePrincipalId; return this; @@ -27,17 +43,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateServicePrincipalSecretRequest that = (CreateServicePrincipalSecretRequest) o; - return Objects.equals(servicePrincipalId, that.servicePrincipalId); + return Objects.equals(lifetime, that.lifetime) + && Objects.equals(servicePrincipalId, that.servicePrincipalId); } @Override public int hashCode() { - return Objects.hash(servicePrincipalId); + return Objects.hash(lifetime, servicePrincipalId); } @Override public String toString() { return new ToStringer(CreateServicePrincipalSecretRequest.class) + .add("lifetime", lifetime) .add("servicePrincipalId", servicePrincipalId) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretResponse.java index af9fa65c7..639e5889f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalSecretResponse.java @@ -13,6 +13,12 @@ public class CreateServicePrincipalSecretResponse { @JsonProperty("create_time") private String createTime; + /** + * UTC time when the secret will expire. If the field is not present, the secret does not expire. 
+ */ + @JsonProperty("expire_time") + private String expireTime; + /** ID of the secret */ @JsonProperty("id") private String id; @@ -42,6 +48,15 @@ public String getCreateTime() { return createTime; } + public CreateServicePrincipalSecretResponse setExpireTime(String expireTime) { + this.expireTime = expireTime; + return this; + } + + public String getExpireTime() { + return expireTime; + } + public CreateServicePrincipalSecretResponse setId(String id) { this.id = id; return this; @@ -93,6 +108,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; CreateServicePrincipalSecretResponse that = (CreateServicePrincipalSecretResponse) o; return Objects.equals(createTime, that.createTime) + && Objects.equals(expireTime, that.expireTime) && Objects.equals(id, that.id) && Objects.equals(secret, that.secret) && Objects.equals(secretHash, that.secretHash) @@ -102,13 +118,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(createTime, id, secret, secretHash, status, updateTime); + return Objects.hash(createTime, expireTime, id, secret, secretHash, status, updateTime); } @Override public String toString() { return new ToStringer(CreateServicePrincipalSecretResponse.class) .add("createTime", createTime) + .add("expireTime", expireTime) .add("id", id) .add("secret", secret) .add("secretHash", secretHash) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java index ffe72ba17..390d2d234 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java @@ -26,10 +26,11 @@ public class OidcFederationPolicy { private String issuer; /** - * The public keys used to validate the signature of federated tokens, in JWKS 
format. If - * unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s - * well known endpoint. Databricks strongly recommends relying on your issuer’s well known - * endpoint for discovering public keys. + * The public keys used to validate the signature of federated tokens, in JWKS format. Most use + * cases should not need to specify this field. If jwks_uri and jwks_json are both unspecified + * (recommended), Databricks automatically fetches the public keys from your issuer’s well known + * endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for + * discovering public keys. */ @JsonProperty("jwks_json") private String jwksJson; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/SecretInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/SecretInfo.java index 97a957543..503275d5e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/SecretInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/SecretInfo.java @@ -13,6 +13,12 @@ public class SecretInfo { @JsonProperty("create_time") private String createTime; + /** + * UTC time when the secret will expire. If the field is not present, the secret does not expire. 
+ */ + @JsonProperty("expire_time") + private String expireTime; + /** ID of the secret */ @JsonProperty("id") private String id; @@ -38,6 +44,15 @@ public String getCreateTime() { return createTime; } + public SecretInfo setExpireTime(String expireTime) { + this.expireTime = expireTime; + return this; + } + + public String getExpireTime() { + return expireTime; + } + public SecretInfo setId(String id) { this.id = id; return this; @@ -80,6 +95,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; SecretInfo that = (SecretInfo) o; return Objects.equals(createTime, that.createTime) + && Objects.equals(expireTime, that.expireTime) && Objects.equals(id, that.id) && Objects.equals(secretHash, that.secretHash) && Objects.equals(status, that.status) @@ -88,13 +104,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(createTime, id, secretHash, status, updateTime); + return Objects.hash(createTime, expireTime, id, secretHash, status, updateTime); } @Override public String toString() { return new ToStringer(SecretInfo.class) .add("createTime", createTime) + .add("expireTime", expireTime) .add("id", id) .add("secretHash", secretHash) .add("status", status) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java index dedcff20f..3579430b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java @@ -23,9 +23,10 @@ public CreateServicePrincipalSecretResponse create(CreateServicePrincipalSecretR "/api/2.0/accounts/%s/servicePrincipals/%s/credentials/secrets", apiClient.configuredAccountID(), request.getServicePrincipalId()); try { - Request req = new Request("POST", path); + 
Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); return apiClient.execute(req, CreateServicePrincipalSecretResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java index 176f17334..430410354 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java @@ -56,6 +56,15 @@ public class AmazonBedrockConfig { @JsonProperty("bedrock_provider") private AmazonBedrockConfigBedrockProvider bedrockProvider; + /** + * ARN of the instance profile that the external model will use to access AWS resources. You must + * authenticate using an instance profile or access keys. If you prefer to authenticate using + * access keys, see `aws_access_key_id`, `aws_access_key_id_plaintext`, `aws_secret_access_key` + * and `aws_secret_access_key_plaintext`. 
+ */ + @JsonProperty("instance_profile_arn") + private String instanceProfileArn; + public AmazonBedrockConfig setAwsAccessKeyId(String awsAccessKeyId) { this.awsAccessKeyId = awsAccessKeyId; return this; @@ -111,6 +120,15 @@ public AmazonBedrockConfigBedrockProvider getBedrockProvider() { return bedrockProvider; } + public AmazonBedrockConfig setInstanceProfileArn(String instanceProfileArn) { + this.instanceProfileArn = instanceProfileArn; + return this; + } + + public String getInstanceProfileArn() { + return instanceProfileArn; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -121,7 +139,8 @@ public boolean equals(Object o) { && Objects.equals(awsRegion, that.awsRegion) && Objects.equals(awsSecretAccessKey, that.awsSecretAccessKey) && Objects.equals(awsSecretAccessKeyPlaintext, that.awsSecretAccessKeyPlaintext) - && Objects.equals(bedrockProvider, that.bedrockProvider); + && Objects.equals(bedrockProvider, that.bedrockProvider) + && Objects.equals(instanceProfileArn, that.instanceProfileArn); } @Override @@ -132,7 +151,8 @@ public int hashCode() { awsRegion, awsSecretAccessKey, awsSecretAccessKeyPlaintext, - bedrockProvider); + bedrockProvider, + instanceProfileArn); } @Override @@ -144,6 +164,7 @@ public String toString() { .add("awsSecretAccessKey", awsSecretAccessKey) .add("awsSecretAccessKeyPlaintext", awsSecretAccessKeyPlaintext) .add("bedrockProvider", bedrockProvider) + .add("instanceProfileArn", instanceProfileArn) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java index e9362231f..1e3ed8301 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java @@ -17,6 +17,10 @@ public class CreateServingEndpoint { 
@JsonProperty("ai_gateway") private AiGatewayConfig aiGateway; + /** The budget policy to be applied to the serving endpoint. */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + /** The core config of the serving endpoint. */ @JsonProperty("config") private EndpointCoreConfigInput config; @@ -52,6 +56,15 @@ public AiGatewayConfig getAiGateway() { return aiGateway; } + public CreateServingEndpoint setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + public CreateServingEndpoint setConfig(EndpointCoreConfigInput config) { this.config = config; return this; @@ -103,6 +116,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; CreateServingEndpoint that = (CreateServingEndpoint) o; return Objects.equals(aiGateway, that.aiGateway) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) && Objects.equals(config, that.config) && Objects.equals(name, that.name) && Objects.equals(rateLimits, that.rateLimits) @@ -112,13 +126,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(aiGateway, config, name, rateLimits, routeOptimized, tags); + return Objects.hash(aiGateway, budgetPolicyId, config, name, rateLimits, routeOptimized, tags); } @Override public String toString() { return new ToStringer(CreateServingEndpoint.class) .add("aiGateway", aiGateway) + .add("budgetPolicyId", budgetPolicyId) .add("config", config) .add("name", name) .add("rateLimits", rateLimits) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java index 391d622e7..d213e6dcb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java @@ -17,6 +17,10 @@ public class ServingEndpoint { @JsonProperty("ai_gateway") private AiGatewayConfig aiGateway; + /** The budget policy associated with the endpoint. */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + /** The config that is currently being served by the endpoint. */ @JsonProperty("config") private EndpointCoreConfigSummary config; @@ -62,6 +66,15 @@ public AiGatewayConfig getAiGateway() { return aiGateway; } + public ServingEndpoint setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + public ServingEndpoint setConfig(EndpointCoreConfigSummary config) { this.config = config; return this; @@ -149,6 +162,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ServingEndpoint that = (ServingEndpoint) o; return Objects.equals(aiGateway, that.aiGateway) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) && Objects.equals(config, that.config) && Objects.equals(creationTimestamp, that.creationTimestamp) && Objects.equals(creator, that.creator) @@ -164,6 +178,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( aiGateway, + budgetPolicyId, config, creationTimestamp, creator, @@ -179,6 +194,7 @@ public int hashCode() { public String toString() { return new ToStringer(ServingEndpoint.class) .add("aiGateway", aiGateway) + .add("budgetPolicyId", budgetPolicyId) .add("config", config) .add("creationTimestamp", creationTimestamp) .add("creator", creator) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java index a0cc5a1c4..2169edd4a 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java @@ -17,6 +17,10 @@ public class ServingEndpointDetailed { @JsonProperty("ai_gateway") private AiGatewayConfig aiGateway; + /** The budget policy associated with the endpoint. */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + /** The config that is currently being served by the endpoint. */ @JsonProperty("config") private EndpointCoreConfigOutput config; @@ -85,6 +89,15 @@ public AiGatewayConfig getAiGateway() { return aiGateway; } + public ServingEndpointDetailed setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + public ServingEndpointDetailed setConfig(EndpointCoreConfigOutput config) { this.config = config; return this; @@ -218,6 +231,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ServingEndpointDetailed that = (ServingEndpointDetailed) o; return Objects.equals(aiGateway, that.aiGateway) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) && Objects.equals(config, that.config) && Objects.equals(creationTimestamp, that.creationTimestamp) && Objects.equals(creator, that.creator) @@ -238,6 +252,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( aiGateway, + budgetPolicyId, config, creationTimestamp, creator, @@ -258,6 +273,7 @@ public int hashCode() { public String toString() { return new ToStringer(ServingEndpointDetailed.class) .add("aiGateway", aiGateway) + .add("budgetPolicyId", budgetPolicyId) .add("config", config) .add("creationTimestamp", creationTimestamp) .add("creator", creator) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java index 5a32ebcbc..33b47d710 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java @@ -52,7 +52,6 @@ public void delete(DeleteServingEndpointRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java index d5367da61..eedc75e39 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java @@ -40,7 +40,6 @@ public void delete(DeleteAccountIpAccessListRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -85,7 +84,6 @@ public void replace(ReplaceIpAccessList request) { try { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, ReplaceResponse.class); } catch (IOException e) { @@ -102,7 +100,6 @@ public void update(UpdateIpAccessList request) { try { Request req = new Request("PATCH", path, apiClient.serialize(request)); 
ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java index d0cc96208..b9cca1598 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java @@ -36,7 +36,6 @@ public void delete(DeleteIpAccessListRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -74,7 +73,6 @@ public void replace(ReplaceIpAccessList request) { try { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, ReplaceResponse.class); } catch (IOException e) { @@ -88,7 +86,6 @@ public void update(UpdateIpAccessList request) { try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java index 3eec231b5..8264976e5 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java @@ -36,7 +36,6 @@ public void delete(DeleteTokenManagementRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/AuthenticationType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/AuthenticationType.java index 2a8a04ae9..6b48f36b2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/AuthenticationType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/AuthenticationType.java @@ -8,5 +8,6 @@ @Generated public enum AuthenticationType { DATABRICKS, + OAUTH_CLIENT_CREDENTIALS, TOKEN, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java new file mode 100755 index 000000000..6a682f628 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java @@ -0,0 +1,35 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; + +/** + * UC supported column types Copied from + * https://src.dev.databricks.com/databricks/universe@23a85902bb58695ab9293adc9f327b0714b55e72/-/blob/managed-catalog/api/messages/table.proto?L68 + */ +@Generated +public enum ColumnTypeName { + ARRAY, + BINARY, + BOOLEAN, + BYTE, + CHAR, + DATE, + DECIMAL, + DOUBLE, + FLOAT, + INT, + INTERVAL, + LONG, + MAP, + NULL, + SHORT, + STRING, + STRUCT, + TABLE_TYPE, + TIMESTAMP, + TIMESTAMP_NTZ, + USER_DEFINED_TYPE, + VARIANT, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java new file mode 100755 index 000000000..93166cac2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents a UC dependency. */ +@Generated +public class DeltaSharingDependency { + /** A Function in UC as a dependency. */ + @JsonProperty("function") + private DeltaSharingFunctionDependency function; + + /** A Table in UC as a dependency. 
*/ + @JsonProperty("table") + private DeltaSharingTableDependency table; + + public DeltaSharingDependency setFunction(DeltaSharingFunctionDependency function) { + this.function = function; + return this; + } + + public DeltaSharingFunctionDependency getFunction() { + return function; + } + + public DeltaSharingDependency setTable(DeltaSharingTableDependency table) { + this.table = table; + return this; + } + + public DeltaSharingTableDependency getTable() { + return table; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaSharingDependency that = (DeltaSharingDependency) o; + return Objects.equals(function, that.function) && Objects.equals(table, that.table); + } + + @Override + public int hashCode() { + return Objects.hash(function, table); + } + + @Override + public String toString() { + return new ToStringer(DeltaSharingDependency.class) + .add("function", function) + .add("table", table) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java new file mode 100755 index 000000000..b4a4a0b23 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Represents a list of dependencies. */ +@Generated +public class DeltaSharingDependencyList { + /** An array of Dependency. 
*/ + @JsonProperty("dependencies") + private Collection dependencies; + + public DeltaSharingDependencyList setDependencies( + Collection dependencies) { + this.dependencies = dependencies; + return this; + } + + public Collection getDependencies() { + return dependencies; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaSharingDependencyList that = (DeltaSharingDependencyList) o; + return Objects.equals(dependencies, that.dependencies); + } + + @Override + public int hashCode() { + return Objects.hash(dependencies); + } + + @Override + public String toString() { + return new ToStringer(DeltaSharingDependencyList.class) + .add("dependencies", dependencies) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependency.java new file mode 100755 index 000000000..33c02ebb8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependency.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A Function in UC as a dependency. 
*/ +@Generated +public class DeltaSharingFunctionDependency { + /** */ + @JsonProperty("function_name") + private String functionName; + + /** */ + @JsonProperty("schema_name") + private String schemaName; + + public DeltaSharingFunctionDependency setFunctionName(String functionName) { + this.functionName = functionName; + return this; + } + + public String getFunctionName() { + return functionName; + } + + public DeltaSharingFunctionDependency setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaSharingFunctionDependency that = (DeltaSharingFunctionDependency) o; + return Objects.equals(functionName, that.functionName) + && Objects.equals(schemaName, that.schemaName); + } + + @Override + public int hashCode() { + return Objects.hash(functionName, schemaName); + } + + @Override + public String toString() { + return new ToStringer(DeltaSharingFunctionDependency.class) + .add("functionName", functionName) + .add("schemaName", schemaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependency.java new file mode 100755 index 000000000..96bc1c4b3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependency.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A Table in UC as a dependency. 
*/ +@Generated +public class DeltaSharingTableDependency { + /** */ + @JsonProperty("schema_name") + private String schemaName; + + /** */ + @JsonProperty("table_name") + private String tableName; + + public DeltaSharingTableDependency setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public DeltaSharingTableDependency setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeltaSharingTableDependency that = (DeltaSharingTableDependency) o; + return Objects.equals(schemaName, that.schemaName) && Objects.equals(tableName, that.tableName); + } + + @Override + public int hashCode() { + return Objects.hash(schemaName, tableName); + } + + @Override + public String toString() { + return new ToStringer(DeltaSharingTableDependency.class) + .add("schemaName", schemaName) + .add("tableName", tableName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java new file mode 100755 index 000000000..e68e2a31b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java @@ -0,0 +1,286 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class Function { + /** The aliass of registered model. 
*/ + @JsonProperty("aliases") + private Collection aliases; + + /** The comment of the function. */ + @JsonProperty("comment") + private String comment; + + /** The data type of the function. */ + @JsonProperty("data_type") + private ColumnTypeName dataType; + + /** The dependency list of the function. */ + @JsonProperty("dependency_list") + private DeltaSharingDependencyList dependencyList; + + /** The full data type of the function. */ + @JsonProperty("full_data_type") + private String fullDataType; + + /** The id of the function. */ + @JsonProperty("id") + private String id; + + /** The function parameter information. */ + @JsonProperty("input_params") + private FunctionParameterInfos inputParams; + + /** The name of the function. */ + @JsonProperty("name") + private String name; + + /** The properties of the function. */ + @JsonProperty("properties") + private String properties; + + /** The routine definition of the function. */ + @JsonProperty("routine_definition") + private String routineDefinition; + + /** The name of the schema that the function belongs to. */ + @JsonProperty("schema") + private String schema; + + /** The securable kind of the function. */ + @JsonProperty("securable_kind") + private SharedSecurableKind securableKind; + + /** The name of the share that the function belongs to. */ + @JsonProperty("share") + private String share; + + /** The id of the share that the function belongs to. */ + @JsonProperty("share_id") + private String shareId; + + /** The storage location of the function. */ + @JsonProperty("storage_location") + private String storageLocation; + + /** The tags of the function. 
*/ + @JsonProperty("tags") + private Collection tags; + + public Function setAliases(Collection aliases) { + this.aliases = aliases; + return this; + } + + public Collection getAliases() { + return aliases; + } + + public Function setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public Function setDataType(ColumnTypeName dataType) { + this.dataType = dataType; + return this; + } + + public ColumnTypeName getDataType() { + return dataType; + } + + public Function setDependencyList(DeltaSharingDependencyList dependencyList) { + this.dependencyList = dependencyList; + return this; + } + + public DeltaSharingDependencyList getDependencyList() { + return dependencyList; + } + + public Function setFullDataType(String fullDataType) { + this.fullDataType = fullDataType; + return this; + } + + public String getFullDataType() { + return fullDataType; + } + + public Function setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public Function setInputParams(FunctionParameterInfos inputParams) { + this.inputParams = inputParams; + return this; + } + + public FunctionParameterInfos getInputParams() { + return inputParams; + } + + public Function setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public Function setProperties(String properties) { + this.properties = properties; + return this; + } + + public String getProperties() { + return properties; + } + + public Function setRoutineDefinition(String routineDefinition) { + this.routineDefinition = routineDefinition; + return this; + } + + public String getRoutineDefinition() { + return routineDefinition; + } + + public Function setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public Function setSecurableKind(SharedSecurableKind securableKind) { + 
this.securableKind = securableKind; + return this; + } + + public SharedSecurableKind getSecurableKind() { + return securableKind; + } + + public Function setShare(String share) { + this.share = share; + return this; + } + + public String getShare() { + return share; + } + + public Function setShareId(String shareId) { + this.shareId = shareId; + return this; + } + + public String getShareId() { + return shareId; + } + + public Function setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public Function setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Function that = (Function) o; + return Objects.equals(aliases, that.aliases) + && Objects.equals(comment, that.comment) + && Objects.equals(dataType, that.dataType) + && Objects.equals(dependencyList, that.dependencyList) + && Objects.equals(fullDataType, that.fullDataType) + && Objects.equals(id, that.id) + && Objects.equals(inputParams, that.inputParams) + && Objects.equals(name, that.name) + && Objects.equals(properties, that.properties) + && Objects.equals(routineDefinition, that.routineDefinition) + && Objects.equals(schema, that.schema) + && Objects.equals(securableKind, that.securableKind) + && Objects.equals(share, that.share) + && Objects.equals(shareId, that.shareId) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash( + aliases, + comment, + dataType, + dependencyList, + fullDataType, + id, + inputParams, + name, + properties, + routineDefinition, + schema, + securableKind, + share, + shareId, + storageLocation, + tags); + } + + @Override + public String 
toString() { + return new ToStringer(Function.class) + .add("aliases", aliases) + .add("comment", comment) + .add("dataType", dataType) + .add("dependencyList", dependencyList) + .add("fullDataType", fullDataType) + .add("id", id) + .add("inputParams", inputParams) + .add("name", name) + .add("properties", properties) + .add("routineDefinition", routineDefinition) + .add("schema", schema) + .add("securableKind", securableKind) + .add("share", share) + .add("shareId", shareId) + .add("storageLocation", storageLocation) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfo.java new file mode 100755 index 000000000..81b11e045 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfo.java @@ -0,0 +1,224 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Represents a parameter of a function. The same message is used for both input and output columns. + */ +@Generated +public class FunctionParameterInfo { + /** The comment of the parameter. */ + @JsonProperty("comment") + private String comment; + + /** The name of the parameter. */ + @JsonProperty("name") + private String name; + + /** The default value of the parameter. */ + @JsonProperty("parameter_default") + private String parameterDefault; + + /** The mode of the function parameter. */ + @JsonProperty("parameter_mode") + private FunctionParameterMode parameterMode; + + /** The type of the function parameter. 
*/ + @JsonProperty("parameter_type") + private FunctionParameterType parameterType; + + /** The position of the parameter. */ + @JsonProperty("position") + private Long position; + + /** The interval type of the parameter type. */ + @JsonProperty("type_interval_type") + private String typeIntervalType; + + /** The type of the parameter in JSON format. */ + @JsonProperty("type_json") + private String typeJson; + + /** The type of the parameter in Enum format. */ + @JsonProperty("type_name") + private ColumnTypeName typeName; + + /** The precision of the parameter type. */ + @JsonProperty("type_precision") + private Long typePrecision; + + /** The scale of the parameter type. */ + @JsonProperty("type_scale") + private Long typeScale; + + /** The type of the parameter in text format. */ + @JsonProperty("type_text") + private String typeText; + + public FunctionParameterInfo setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public FunctionParameterInfo setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public FunctionParameterInfo setParameterDefault(String parameterDefault) { + this.parameterDefault = parameterDefault; + return this; + } + + public String getParameterDefault() { + return parameterDefault; + } + + public FunctionParameterInfo setParameterMode(FunctionParameterMode parameterMode) { + this.parameterMode = parameterMode; + return this; + } + + public FunctionParameterMode getParameterMode() { + return parameterMode; + } + + public FunctionParameterInfo setParameterType(FunctionParameterType parameterType) { + this.parameterType = parameterType; + return this; + } + + public FunctionParameterType getParameterType() { + return parameterType; + } + + public FunctionParameterInfo setPosition(Long position) { + this.position = position; + return this; + } + + public Long getPosition() { + return position; + } + + 
public FunctionParameterInfo setTypeIntervalType(String typeIntervalType) { + this.typeIntervalType = typeIntervalType; + return this; + } + + public String getTypeIntervalType() { + return typeIntervalType; + } + + public FunctionParameterInfo setTypeJson(String typeJson) { + this.typeJson = typeJson; + return this; + } + + public String getTypeJson() { + return typeJson; + } + + public FunctionParameterInfo setTypeName(ColumnTypeName typeName) { + this.typeName = typeName; + return this; + } + + public ColumnTypeName getTypeName() { + return typeName; + } + + public FunctionParameterInfo setTypePrecision(Long typePrecision) { + this.typePrecision = typePrecision; + return this; + } + + public Long getTypePrecision() { + return typePrecision; + } + + public FunctionParameterInfo setTypeScale(Long typeScale) { + this.typeScale = typeScale; + return this; + } + + public Long getTypeScale() { + return typeScale; + } + + public FunctionParameterInfo setTypeText(String typeText) { + this.typeText = typeText; + return this; + } + + public String getTypeText() { + return typeText; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FunctionParameterInfo that = (FunctionParameterInfo) o; + return Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(parameterDefault, that.parameterDefault) + && Objects.equals(parameterMode, that.parameterMode) + && Objects.equals(parameterType, that.parameterType) + && Objects.equals(position, that.position) + && Objects.equals(typeIntervalType, that.typeIntervalType) + && Objects.equals(typeJson, that.typeJson) + && Objects.equals(typeName, that.typeName) + && Objects.equals(typePrecision, that.typePrecision) + && Objects.equals(typeScale, that.typeScale) + && Objects.equals(typeText, that.typeText); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, + name, + parameterDefault, 
+ parameterMode, + parameterType, + position, + typeIntervalType, + typeJson, + typeName, + typePrecision, + typeScale, + typeText); + } + + @Override + public String toString() { + return new ToStringer(FunctionParameterInfo.class) + .add("comment", comment) + .add("name", name) + .add("parameterDefault", parameterDefault) + .add("parameterMode", parameterMode) + .add("parameterType", parameterType) + .add("position", position) + .add("typeIntervalType", typeIntervalType) + .add("typeJson", typeJson) + .add("typeName", typeName) + .add("typePrecision", typePrecision) + .add("typeScale", typeScale) + .add("typeText", typeText) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfos.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfos.java new file mode 100755 index 000000000..3f1217b3e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfos.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class FunctionParameterInfos { + /** The list of parameters of the function. 
*/ + @JsonProperty("parameters") + private Collection parameters; + + public FunctionParameterInfos setParameters(Collection parameters) { + this.parameters = parameters; + return this; + } + + public Collection getParameters() { + return parameters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FunctionParameterInfos that = (FunctionParameterInfos) o; + return Objects.equals(parameters, that.parameters); + } + + @Override + public int hashCode() { + return Objects.hash(parameters); + } + + @Override + public String toString() { + return new ToStringer(FunctionParameterInfos.class).add("parameters", parameters).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterMode.java new file mode 100755 index 000000000..3ba70e856 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterMode.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum FunctionParameterMode { + IN, + INOUT, + OUT, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterType.java new file mode 100755 index 000000000..2ee4562f2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterType.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum FunctionParameterType { + COLUMN, + PARAM, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponse.java new file mode 100755 index 000000000..d635d5575 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponse.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GetSharePermissionsResponse { + /** + * Opaque token to retrieve the next page of results. Absent if there are no more pages. + * __page_token__ should be set to this value for the next request (for the next page of results). 
+ */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** The privileges assigned to each principal */ + @JsonProperty("privilege_assignments") + private Collection privilegeAssignments; + + public GetSharePermissionsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public GetSharePermissionsResponse setPrivilegeAssignments( + Collection privilegeAssignments) { + this.privilegeAssignments = privilegeAssignments; + return this; + } + + public Collection getPrivilegeAssignments() { + return privilegeAssignments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSharePermissionsResponse that = (GetSharePermissionsResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(privilegeAssignments, that.privilegeAssignments); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, privilegeAssignments); + } + + @Override + public String toString() { + return new ToStringer(GetSharePermissionsResponse.class) + .add("nextPageToken", nextPageToken) + .add("privilegeAssignments", privilegeAssignments) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequest.java new file mode 100755 index 000000000..fea546dc1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequest.java @@ -0,0 +1,129 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List assets by provider share */ +@Generated +public class ListProviderShareAssetsRequest { + /** Maximum number of functions to return. */ + @JsonIgnore + @QueryParam("function_max_results") + private Long functionMaxResults; + + /** Maximum number of notebooks to return. */ + @JsonIgnore + @QueryParam("notebook_max_results") + private Long notebookMaxResults; + + /** The name of the provider who owns the share. */ + @JsonIgnore private String providerName; + + /** The name of the share. */ + @JsonIgnore private String shareName; + + /** Maximum number of tables to return. */ + @JsonIgnore + @QueryParam("table_max_results") + private Long tableMaxResults; + + /** Maximum number of volumes to return. */ + @JsonIgnore + @QueryParam("volume_max_results") + private Long volumeMaxResults; + + public ListProviderShareAssetsRequest setFunctionMaxResults(Long functionMaxResults) { + this.functionMaxResults = functionMaxResults; + return this; + } + + public Long getFunctionMaxResults() { + return functionMaxResults; + } + + public ListProviderShareAssetsRequest setNotebookMaxResults(Long notebookMaxResults) { + this.notebookMaxResults = notebookMaxResults; + return this; + } + + public Long getNotebookMaxResults() { + return notebookMaxResults; + } + + public ListProviderShareAssetsRequest setProviderName(String providerName) { + this.providerName = providerName; + return this; + } + + public String getProviderName() { + return providerName; + } + + public ListProviderShareAssetsRequest setShareName(String shareName) { + this.shareName = shareName; + return this; + } + + public String getShareName() { + return shareName; + } + + public ListProviderShareAssetsRequest setTableMaxResults(Long 
tableMaxResults) { + this.tableMaxResults = tableMaxResults; + return this; + } + + public Long getTableMaxResults() { + return tableMaxResults; + } + + public ListProviderShareAssetsRequest setVolumeMaxResults(Long volumeMaxResults) { + this.volumeMaxResults = volumeMaxResults; + return this; + } + + public Long getVolumeMaxResults() { + return volumeMaxResults; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProviderShareAssetsRequest that = (ListProviderShareAssetsRequest) o; + return Objects.equals(functionMaxResults, that.functionMaxResults) + && Objects.equals(notebookMaxResults, that.notebookMaxResults) + && Objects.equals(providerName, that.providerName) + && Objects.equals(shareName, that.shareName) + && Objects.equals(tableMaxResults, that.tableMaxResults) + && Objects.equals(volumeMaxResults, that.volumeMaxResults); + } + + @Override + public int hashCode() { + return Objects.hash( + functionMaxResults, + notebookMaxResults, + providerName, + shareName, + tableMaxResults, + volumeMaxResults); + } + + @Override + public String toString() { + return new ToStringer(ListProviderShareAssetsRequest.class) + .add("functionMaxResults", functionMaxResults) + .add("notebookMaxResults", notebookMaxResults) + .add("providerName", providerName) + .add("shareName", shareName) + .add("tableMaxResults", tableMaxResults) + .add("volumeMaxResults", volumeMaxResults) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java new file mode 100755 index 000000000..45252078d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Response to ListProviderShareAssets, which contains the list of assets of a share. */ +@Generated +public class ListProviderShareAssetsResponse { + /** The list of functions in the share. */ + @JsonProperty("functions") + private Collection functions; + + /** The list of notebooks in the share. */ + @JsonProperty("notebooks") + private Collection notebooks; + + /** The list of tables in the share. */ + @JsonProperty("tables") + private Collection tables; + + /** The list of volumes in the share. */ + @JsonProperty("volumes") + private Collection volumes; + + public ListProviderShareAssetsResponse setFunctions(Collection functions) { + this.functions = functions; + return this; + } + + public Collection getFunctions() { + return functions; + } + + public ListProviderShareAssetsResponse setNotebooks(Collection notebooks) { + this.notebooks = notebooks; + return this; + } + + public Collection getNotebooks() { + return notebooks; + } + + public ListProviderShareAssetsResponse setTables(Collection
tables) { + this.tables = tables; + return this; + } + + public Collection
getTables() { + return tables; + } + + public ListProviderShareAssetsResponse setVolumes(Collection volumes) { + this.volumes = volumes; + return this; + } + + public Collection getVolumes() { + return volumes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProviderShareAssetsResponse that = (ListProviderShareAssetsResponse) o; + return Objects.equals(functions, that.functions) + && Objects.equals(notebooks, that.notebooks) + && Objects.equals(tables, that.tables) + && Objects.equals(volumes, that.volumes); + } + + @Override + public int hashCode() { + return Objects.hash(functions, notebooks, tables, volumes); + } + + @Override + public String toString() { + return new ToStringer(ListProviderShareAssetsResponse.class) + .add("functions", functions) + .add("notebooks", notebooks) + .add("tables", tables) + .add("volumes", volumes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFile.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFile.java new file mode 100755 index 000000000..f2194dbed --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFile.java @@ -0,0 +1,120 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class NotebookFile { + /** The comment of the notebook file. */ + @JsonProperty("comment") + private String comment; + + /** The id of the notebook file. */ + @JsonProperty("id") + private String id; + + /** Name of the notebook file. 
*/ + @JsonProperty("name") + private String name; + + /** The name of the share that the notebook file belongs to. */ + @JsonProperty("share") + private String share; + + /** The id of the share that the notebook file belongs to. */ + @JsonProperty("share_id") + private String shareId; + + /** The tags of the notebook file. */ + @JsonProperty("tags") + private Collection tags; + + public NotebookFile setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public NotebookFile setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public NotebookFile setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public NotebookFile setShare(String share) { + this.share = share; + return this; + } + + public String getShare() { + return share; + } + + public NotebookFile setShareId(String shareId) { + this.shareId = shareId; + return this; + } + + public String getShareId() { + return shareId; + } + + public NotebookFile setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NotebookFile that = (NotebookFile) o; + return Objects.equals(comment, that.comment) + && Objects.equals(id, that.id) + && Objects.equals(name, that.name) + && Objects.equals(share, that.share) + && Objects.equals(shareId, that.shareId) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(comment, id, name, share, shareId, tags); + } + + @Override + public String toString() { + return new ToStringer(NotebookFile.class) + .add("comment", comment) + .add("id", id) + .add("name", name) + .add("share", share) + .add("shareId", shareId) + .add("tags", tags) + .toString(); + } 
+} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionSpecificationPartition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionSpecificationPartition.java deleted file mode 100755 index 98b162750..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionSpecificationPartition.java +++ /dev/null @@ -1,43 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class PartitionSpecificationPartition { - /** An array of partition values. */ - @JsonProperty("values") - private Collection values; - - public PartitionSpecificationPartition setValues(Collection values) { - this.values = values; - return this; - } - - public Collection getValues() { - return values; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PartitionSpecificationPartition that = (PartitionSpecificationPartition) o; - return Objects.equals(values, that.values); - } - - @Override - public int hashCode() { - return Objects.hash(values); - } - - @Override - public String toString() { - return new ToStringer(PartitionSpecificationPartition.class).add("values", values).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java new file mode 100755 index 000000000..d6e35637a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by 
Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class PermissionsChange { + /** The set of privileges to add. */ + @JsonProperty("add") + private Collection add; + + /** The principal whose privileges we are changing. */ + @JsonProperty("principal") + private String principal; + + /** The set of privileges to remove. */ + @JsonProperty("remove") + private Collection remove; + + public PermissionsChange setAdd(Collection add) { + this.add = add; + return this; + } + + public Collection getAdd() { + return add; + } + + public PermissionsChange setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + + public PermissionsChange setRemove(Collection remove) { + this.remove = remove; + return this; + } + + public Collection getRemove() { + return remove; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermissionsChange that = (PermissionsChange) o; + return Objects.equals(add, that.add) + && Objects.equals(principal, that.principal) + && Objects.equals(remove, that.remove); + } + + @Override + public int hashCode() { + return Objects.hash(add, principal, remove); + } + + @Override + public String toString() { + return new ToStringer(PermissionsChange.class) + .add("add", add) + .add("principal", principal) + .add("remove", remove) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java index 8f58e1da1..b049fc361 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java @@ -94,6 +94,23 @@ public Iterable list(ListProvidersRequest request) { }); } + public ListProviderShareAssetsResponse listProviderShareAssets( + String providerName, String shareName) { + return listProviderShareAssets( + new ListProviderShareAssetsRequest().setProviderName(providerName).setShareName(shareName)); + } + + /** + * List assets by provider share. + * + *

Get arrays of assets associated with a specified provider's share. The caller is the + * recipient of the share. + */ + public ListProviderShareAssetsResponse listProviderShareAssets( + ListProviderShareAssetsRequest request) { + return impl.listProviderShareAssets(request); + } + public Iterable listShares(String name) { return listShares(new ListSharesRequest().setName(name)); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java index 9d49090a6..273efebdb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java @@ -36,7 +36,6 @@ public void delete(DeleteProviderRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -69,6 +68,23 @@ public ListProvidersResponse list(ListProvidersRequest request) { } } + @Override + public ListProviderShareAssetsResponse listProviderShareAssets( + ListProviderShareAssetsRequest request) { + String path = + String.format( + "/api/2.1/data-sharing/providers/%s/shares/%s", + request.getProviderName(), request.getShareName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListProviderShareAssetsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public ListProviderSharesResponse listShares(ListSharesRequest request) { String path = String.format("/api/2.1/unity-catalog/providers/%s/shares", request.getName()); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersService.java index a893ff48f..601031df1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersService.java @@ -46,6 +46,15 @@ public interface ProvidersService { */ ListProvidersResponse list(ListProvidersRequest listProvidersRequest); + /** + * List assets by provider share. + * + *

Get arrays of assets associated with a specified provider's share. The caller is the + * recipient of the share. + */ + ListProviderShareAssetsResponse listProviderShareAssets( + ListProviderShareAssetsRequest listProviderShareAssetsRequest); + /** * List shares by Provider. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java index 45f7c5ada..afd12e0eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java @@ -36,7 +36,6 @@ public void delete(DeleteRecipientRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAlias.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAlias.java new file mode 100755 index 000000000..a39bd7459 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RegisteredModelAlias.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class RegisteredModelAlias { + /** Name of the alias. */ + @JsonProperty("alias_name") + private String aliasName; + + /** Numeric model version that alias will reference. 
*/ + @JsonProperty("version_num") + private Long versionNum; + + public RegisteredModelAlias setAliasName(String aliasName) { + this.aliasName = aliasName; + return this; + } + + public String getAliasName() { + return aliasName; + } + + public RegisteredModelAlias setVersionNum(Long versionNum) { + this.versionNum = versionNum; + return this; + } + + public Long getVersionNum() { + return versionNum; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegisteredModelAlias that = (RegisteredModelAlias) o; + return Objects.equals(aliasName, that.aliasName) && Objects.equals(versionNum, that.versionNum); + } + + @Override + public int hashCode() { + return Objects.hash(aliasName, versionNum); + } + + @Override + public String toString() { + return new ToStringer(RegisteredModelAlias.class) + .add("aliasName", aliasName) + .add("versionNum", versionNum) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java index db3ddc205..f6e090ef5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java @@ -22,7 +22,7 @@ public class SharedDataObject { @JsonProperty("cdf_enabled") private Boolean cdfEnabled; - /** A user-provided comment when adding the data object to the share. [Update:OPT] */ + /** A user-provided comment when adding the data object to the share. */ @JsonProperty("comment") private String comment; @@ -46,10 +46,9 @@ public class SharedDataObject { private SharedDataObjectHistoryDataSharingStatus historyDataSharingStatus; /** - * A fully qualified name that uniquely identifies a data object. - * - * For example, a table's fully qualified name is in the format of - * `..

`. + * A fully qualified name that uniquely identifies a data object. For example, + * a table's fully qualified name is in the format of + * `..
`, */ @JsonProperty("name") private String name; @@ -83,10 +82,10 @@ public class SharedDataObject { private SharedDataObjectStatus status; /** - * A user-provided new name for the data object within the share. If this new name is not + * A user-provided new name for the shared object within the share. If this new name is not not * provided, the object's original name will be used as the `string_shared_as` name. The - * `string_shared_as` name must be unique within a share. For notebooks, the new name should be - * the new notebook file name. + * `string_shared_as` name must be unique for objects of the same type within a Share. For + * notebooks, the new name should be the new notebook file name. */ @JsonProperty("string_shared_as") private String stringSharedAs; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectDataObjectType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectDataObjectType.java index e6388116c..e5accbc93 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectDataObjectType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectDataObjectType.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** The type of the data object. 
*/ @Generated public enum SharedDataObjectDataObjectType { FEATURE_SPEC, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectHistoryDataSharingStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectHistoryDataSharingStatus.java index c3d88ca9a..cd81cfbfa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectHistoryDataSharingStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectHistoryDataSharingStatus.java @@ -4,10 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * Whether to enable or disable sharing of data history. If not specified, the default is - * **DISABLED**. - */ @Generated public enum SharedDataObjectHistoryDataSharingStatus { DISABLED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectStatus.java index c89fea0f5..55fe7d225 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectStatus.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** One of: **ACTIVE**, **PERMISSION_DENIED**. 
*/ @Generated public enum SharedDataObjectStatus { ACTIVE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdateAction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdateAction.java index 9010be1b9..2eb69c4a1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdateAction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdateAction.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** One of: **ADD**, **REMOVE**, **UPDATE**. */ @Generated public enum SharedDataObjectUpdateAction { ADD, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedSecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedSecurableKind.java new file mode 100755 index 000000000..18c4ee774 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedSecurableKind.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; + +/** The SecurableKind of a delta-shared object. 
*/ +@Generated +public enum SharedSecurableKind { + FUNCTION_FEATURE_SPEC, + FUNCTION_REGISTERED_MODEL, + FUNCTION_STANDARD, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java index f0af7a7c9..e33871615 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java @@ -96,7 +96,7 @@ public Iterable list(ListSharesRequest request) { }); } - public com.databricks.sdk.service.catalog.PermissionsList sharePermissions(String name) { + public GetSharePermissionsResponse sharePermissions(String name) { return sharePermissions(new SharePermissionsRequest().setName(name)); } @@ -106,8 +106,7 @@ public com.databricks.sdk.service.catalog.PermissionsList sharePermissions(Strin *

Gets the permissions for a data share from the metastore. The caller must be a metastore * admin or the owner of the share. */ - public com.databricks.sdk.service.catalog.PermissionsList sharePermissions( - SharePermissionsRequest request) { + public GetSharePermissionsResponse sharePermissions(SharePermissionsRequest request) { return impl.sharePermissions(request); } @@ -138,8 +137,8 @@ public ShareInfo update(UpdateShare request) { return impl.update(request); } - public void updatePermissions(String name) { - updatePermissions(new UpdateSharePermissions().setName(name)); + public UpdateSharePermissionsResponse updatePermissions(String name) { + return updatePermissions(new UpdateSharePermissions().setName(name)); } /** @@ -148,11 +147,11 @@ public void updatePermissions(String name) { *

Updates the permissions for a data share in the metastore. The caller must be a metastore * admin or an owner of the share. * - *

For new recipient grants, the user must also be the owner of the recipients. recipient - * revocations do not require additional privileges. + *

For new recipient grants, the user must also be the recipient owner or metastore admin. + * recipient revocations do not require additional privileges. */ - public void updatePermissions(UpdateSharePermissions request) { - impl.updatePermissions(request); + public UpdateSharePermissionsResponse updatePermissions(UpdateSharePermissions request) { + return impl.updatePermissions(request); } public SharesService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java index d32ff58cc..df926ca13 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java @@ -36,7 +36,6 @@ public void delete(DeleteShareRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); apiClient.execute(req, DeleteResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -70,14 +69,13 @@ public ListSharesResponse list(ListSharesRequest request) { } @Override - public com.databricks.sdk.service.catalog.PermissionsList sharePermissions( - SharePermissionsRequest request) { + public GetSharePermissionsResponse sharePermissions(SharePermissionsRequest request) { String path = String.format("/api/2.1/unity-catalog/shares/%s/permissions", request.getName()); try { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, com.databricks.sdk.service.catalog.PermissionsList.class); + return apiClient.execute(req, GetSharePermissionsResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -98,14 +96,14 @@ public ShareInfo update(UpdateShare request) { } 
@Override - public void updatePermissions(UpdateSharePermissions request) { + public UpdateSharePermissionsResponse updatePermissions(UpdateSharePermissions request) { String path = String.format("/api/2.1/unity-catalog/shares/%s/permissions", request.getName()); try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdatePermissionsResponse.class); + return apiClient.execute(req, UpdateSharePermissionsResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesService.java index 70ee38414..bd8aabe0f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesService.java @@ -54,8 +54,7 @@ public interface SharesService { *

Gets the permissions for a data share from the metastore. The caller must be a metastore * admin or the owner of the share. */ - com.databricks.sdk.service.catalog.PermissionsList sharePermissions( - SharePermissionsRequest sharePermissionsRequest); + GetSharePermissionsResponse sharePermissions(SharePermissionsRequest sharePermissionsRequest); /** * Update a share. @@ -84,8 +83,8 @@ com.databricks.sdk.service.catalog.PermissionsList sharePermissions( *

Updates the permissions for a data share in the metastore. The caller must be a metastore * admin or an owner of the share. * - *

For new recipient grants, the user must also be the owner of the recipients. recipient - * revocations do not require additional privileges. + *

For new recipient grants, the user must also be the recipient owner or metastore admin. + * recipient revocations do not require additional privileges. */ - void updatePermissions(UpdateSharePermissions updateSharePermissions); + UpdateSharePermissionsResponse updatePermissions(UpdateSharePermissions updateSharePermissions); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java new file mode 100755 index 000000000..c0d4e744e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java @@ -0,0 +1,166 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class Table { + /** The comment of the table. */ + @JsonProperty("comment") + private String comment; + + /** The id of the table. */ + @JsonProperty("id") + private String id; + + /** Internal information for D2D sharing that should not be disclosed to external users. */ + @JsonProperty("internal_attributes") + private TableInternalAttributes internalAttributes; + + /** The name of a materialized table. */ + @JsonProperty("materialized_table_name") + private String materializedTableName; + + /** The name of the table. */ + @JsonProperty("name") + private String name; + + /** The name of the schema that the table belongs to. */ + @JsonProperty("schema") + private String schema; + + /** The name of the share that the table belongs to. */ + @JsonProperty("share") + private String share; + + /** The id of the share that the table belongs to. */ + @JsonProperty("share_id") + private String shareId; + + /** The Tags of the table. 
*/ + @JsonProperty("tags") + private Collection tags; + + public Table setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public Table setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public Table setInternalAttributes(TableInternalAttributes internalAttributes) { + this.internalAttributes = internalAttributes; + return this; + } + + public TableInternalAttributes getInternalAttributes() { + return internalAttributes; + } + + public Table setMaterializedTableName(String materializedTableName) { + this.materializedTableName = materializedTableName; + return this; + } + + public String getMaterializedTableName() { + return materializedTableName; + } + + public Table setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public Table setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public Table setShare(String share) { + this.share = share; + return this; + } + + public String getShare() { + return share; + } + + public Table setShareId(String shareId) { + this.shareId = shareId; + return this; + } + + public String getShareId() { + return shareId; + } + + public Table setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Table that = (Table) o; + return Objects.equals(comment, that.comment) + && Objects.equals(id, that.id) + && Objects.equals(internalAttributes, that.internalAttributes) + && Objects.equals(materializedTableName, that.materializedTableName) + && Objects.equals(name, that.name) + && Objects.equals(schema, that.schema) + && Objects.equals(share, that.share) + && 
Objects.equals(shareId, that.shareId) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, id, internalAttributes, materializedTableName, name, schema, share, shareId, tags); + } + + @Override + public String toString() { + return new ToStringer(Table.class) + .add("comment", comment) + .add("id", id) + .add("internalAttributes", internalAttributes) + .add("materializedTableName", materializedTableName) + .add("name", name) + .add("schema", schema) + .add("share", share) + .add("shareId", shareId) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java new file mode 100755 index 000000000..b16c3d85a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java @@ -0,0 +1,97 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Internal information for D2D sharing that should not be disclosed to external users. */ +@Generated +public class TableInternalAttributes { + /** + * Will be populated in the reconciliation response for VIEW and FOREIGN_TABLE, with the value of + * the parent UC entity's storage_location, following the same logic as getManagedEntityPath in + * CreateStagingTableHandler, which is used to store the materialized table for a shared + * VIEW/FOREIGN_TABLE for D2O queries. The value will be used on the recipient side to be + * whitelisted when SEG is enabled on the workspace of the recipient, to allow the recipient users + * to query this shared VIEW/FOREIGN_TABLE. 
+ */ + @JsonProperty("parent_storage_location") + private String parentStorageLocation; + + /** The cloud storage location of a shard table with DIRECTORY_BASED_TABLE type. */ + @JsonProperty("storage_location") + private String storageLocation; + + /** The type of the shared table. */ + @JsonProperty("type") + private TableInternalAttributesSharedTableType typeValue; + + /** The view definition of a shared view. DEPRECATED. */ + @JsonProperty("view_definition") + private String viewDefinition; + + public TableInternalAttributes setParentStorageLocation(String parentStorageLocation) { + this.parentStorageLocation = parentStorageLocation; + return this; + } + + public String getParentStorageLocation() { + return parentStorageLocation; + } + + public TableInternalAttributes setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public TableInternalAttributes setType(TableInternalAttributesSharedTableType typeValue) { + this.typeValue = typeValue; + return this; + } + + public TableInternalAttributesSharedTableType getType() { + return typeValue; + } + + public TableInternalAttributes setViewDefinition(String viewDefinition) { + this.viewDefinition = viewDefinition; + return this; + } + + public String getViewDefinition() { + return viewDefinition; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableInternalAttributes that = (TableInternalAttributes) o; + return Objects.equals(parentStorageLocation, that.parentStorageLocation) + && Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(typeValue, that.typeValue) + && Objects.equals(viewDefinition, that.viewDefinition); + } + + @Override + public int hashCode() { + return Objects.hash(parentStorageLocation, storageLocation, typeValue, viewDefinition); + } + + @Override + public 
String toString() { + return new ToStringer(TableInternalAttributes.class) + .add("parentStorageLocation", parentStorageLocation) + .add("storageLocation", storageLocation) + .add("typeValue", typeValue) + .add("viewDefinition", viewDefinition) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesSharedTableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesSharedTableType.java new file mode 100755 index 000000000..e30b4cca6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesSharedTableType.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum TableInternalAttributesSharedTableType { + DIRECTORY_BASED_TABLE, + FILE_BASED_TABLE, + FOREIGN_TABLE, + MATERIALIZED_VIEW, + STREAMING_TABLE, + VIEW, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdatePermissionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdatePermissionsResponse.java deleted file mode 100755 index 6ac5fa070..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdatePermissionsResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdatePermissionsResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdatePermissionsResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java index 1c9052d9b..823f9e6b3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java @@ -3,7 +3,6 @@ package com.databricks.sdk.service.sharing; import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; @@ -14,48 +13,20 @@ public class UpdateSharePermissions { /** Array of permission changes. */ @JsonProperty("changes") - private Collection changes; - - /** - * Maximum number of permissions to return. - when set to 0, the page length is set to a server - * configured value (recommended); - when set to a value greater than 0, the page length is the - * minimum of this value and a server configured value; - when set to a value less than 0, an - * invalid parameter error is returned; - If not set, all valid permissions are returned (not - * recommended). - Note: The number of returned permissions might be less than the specified - * max_results size, even zero. 
The only definitive indication that no further permissions can be - * fetched is when the next_page_token is unset from the response. - */ - @JsonIgnore - @QueryParam("max_results") - private Long maxResults; + private Collection changes; /** The name of the share. */ @JsonIgnore private String name; - /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") - private String pageToken; - - public UpdateSharePermissions setChanges( - Collection changes) { + public UpdateSharePermissions setChanges(Collection changes) { this.changes = changes; return this; } - public Collection getChanges() { + public Collection getChanges() { return changes; } - public UpdateSharePermissions setMaxResults(Long maxResults) { - this.maxResults = maxResults; - return this; - } - - public Long getMaxResults() { - return maxResults; - } - public UpdateSharePermissions setName(String name) { this.name = name; return this; @@ -65,38 +36,24 @@ public String getName() { return name; } - public UpdateSharePermissions setPageToken(String pageToken) { - this.pageToken = pageToken; - return this; - } - - public String getPageToken() { - return pageToken; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateSharePermissions that = (UpdateSharePermissions) o; - return Objects.equals(changes, that.changes) - && Objects.equals(maxResults, that.maxResults) - && Objects.equals(name, that.name) - && Objects.equals(pageToken, that.pageToken); + return Objects.equals(changes, that.changes) && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(changes, maxResults, name, pageToken); + return Objects.hash(changes, name); } @Override public String toString() { return new ToStringer(UpdateSharePermissions.class) .add("changes", changes) - .add("maxResults", maxResults) .add("name", name) - .add("pageToken", pageToken) 
.toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponse.java new file mode 100755 index 000000000..f9f789ceb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissionsResponse.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class UpdateSharePermissionsResponse { + /** The privileges assigned to each principal */ + @JsonProperty("privilege_assignments") + private Collection privilegeAssignments; + + public UpdateSharePermissionsResponse setPrivilegeAssignments( + Collection privilegeAssignments) { + this.privilegeAssignments = privilegeAssignments; + return this; + } + + public Collection getPrivilegeAssignments() { + return privilegeAssignments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateSharePermissionsResponse that = (UpdateSharePermissionsResponse) o; + return Objects.equals(privilegeAssignments, that.privilegeAssignments); + } + + @Override + public int hashCode() { + return Objects.hash(privilegeAssignments); + } + + @Override + public String toString() { + return new ToStringer(UpdateSharePermissionsResponse.class) + .add("privilegeAssignments", privilegeAssignments) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Volume.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Volume.java new file mode 100755 index 
000000000..1224f0464 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Volume.java @@ -0,0 +1,153 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class Volume { + /** The comment of the volume. */ + @JsonProperty("comment") + private String comment; + + /** + * This id maps to the shared_volume_id in database Recipient needs shared_volume_id for recon to + * check if this volume is already in recipient's DB or not. + */ + @JsonProperty("id") + private String id; + + /** Internal attributes for D2D sharing that should not be disclosed to external users. */ + @JsonProperty("internal_attributes") + private VolumeInternalAttributes internalAttributes; + + /** The name of the volume. */ + @JsonProperty("name") + private String name; + + /** The name of the schema that the volume belongs to. */ + @JsonProperty("schema") + private String schema; + + /** The name of the share that the volume belongs to. */ + @JsonProperty("share") + private String share; + + /** / The id of the share that the volume belongs to. */ + @JsonProperty("share_id") + private String shareId; + + /** The tags of the volume. 
*/ + @JsonProperty("tags") + private Collection tags; + + public Volume setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public Volume setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public Volume setInternalAttributes(VolumeInternalAttributes internalAttributes) { + this.internalAttributes = internalAttributes; + return this; + } + + public VolumeInternalAttributes getInternalAttributes() { + return internalAttributes; + } + + public Volume setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public Volume setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public Volume setShare(String share) { + this.share = share; + return this; + } + + public String getShare() { + return share; + } + + public Volume setShareId(String shareId) { + this.shareId = shareId; + return this; + } + + public String getShareId() { + return shareId; + } + + public Volume setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Volume that = (Volume) o; + return Objects.equals(comment, that.comment) + && Objects.equals(id, that.id) + && Objects.equals(internalAttributes, that.internalAttributes) + && Objects.equals(name, that.name) + && Objects.equals(schema, that.schema) + && Objects.equals(share, that.share) + && Objects.equals(shareId, that.shareId) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(comment, id, internalAttributes, name, schema, share, shareId, tags); + } + + @Override + public String toString() { + return new ToStringer(Volume.class) + 
.add("comment", comment) + .add("id", id) + .add("internalAttributes", internalAttributes) + .add("name", name) + .add("schema", schema) + .add("share", share) + .add("shareId", shareId) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributes.java new file mode 100755 index 000000000..c37dbac83 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributes.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Internal information for D2D sharing that should not be disclosed to external users. */ +@Generated +public class VolumeInternalAttributes { + /** The cloud storage location of the volume */ + @JsonProperty("storage_location") + private String storageLocation; + + /** The type of the shared volume. 
*/ + @JsonProperty("type") + private String typeValue; + + public VolumeInternalAttributes setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + public VolumeInternalAttributes setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + VolumeInternalAttributes that = (VolumeInternalAttributes) o; + return Objects.equals(storageLocation, that.storageLocation) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(storageLocation, typeValue); + } + + @Override + public String toString() { + return new ToStringer(VolumeInternalAttributes.class) + .add("storageLocation", storageLocation) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java index df402b694..dcd707a24 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java @@ -15,6 +15,10 @@ public class QueryVectorIndexRequest { @JsonProperty("columns") private Collection columns; + /** Column names used to retrieve data to send to the reranker. */ + @JsonProperty("columns_to_rerank") + private Collection columnsToRerank; + /** * JSON string representing query filters. 
* @@ -60,6 +64,15 @@ public Collection getColumns() { return columns; } + public QueryVectorIndexRequest setColumnsToRerank(Collection columnsToRerank) { + this.columnsToRerank = columnsToRerank; + return this; + } + + public Collection getColumnsToRerank() { + return columnsToRerank; + } + public QueryVectorIndexRequest setFiltersJson(String filtersJson) { this.filtersJson = filtersJson; return this; @@ -129,6 +142,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; QueryVectorIndexRequest that = (QueryVectorIndexRequest) o; return Objects.equals(columns, that.columns) + && Objects.equals(columnsToRerank, that.columnsToRerank) && Objects.equals(filtersJson, that.filtersJson) && Objects.equals(indexName, that.indexName) && Objects.equals(numResults, that.numResults) @@ -142,6 +156,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( columns, + columnsToRerank, filtersJson, indexName, numResults, @@ -155,6 +170,7 @@ public int hashCode() { public String toString() { return new ToStringer(QueryVectorIndexRequest.class) .add("columns", columns) + .add("columnsToRerank", columnsToRerank) .add("filtersJson", filtersJson) .add("indexName", indexName) .add("numResults", numResults) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java index c0a809cf8..c47a848af 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java @@ -16,7 +16,8 @@ public class QueryVectorIndexResponse { /** * [Optional] Token that can be used in `QueryVectorIndexNextPage` API to get next page of * results. If more than 1000 results satisfy the query, they are returned in groups of 1000. 
- * Empty value means no more results. + * Empty value means no more results. The maximum number of results that can be returned is + * 10,000. */ @JsonProperty("next_page_token") private String nextPageToken; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java index 07720c444..7df15c7ad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java @@ -4,12 +4,14 @@ import com.databricks.sdk.support.Generated; +/** The format for workspace import and export. */ @Generated public enum ExportFormat { AUTO, DBC, HTML, JUPYTER, + RAW, R_MARKDOWN, SOURCE, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java index 7fafa53d1..61360554b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java @@ -7,6 +7,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * The request field `direct_download` determines whether a JSON response or binary contents are + * returned by this endpoint. 
+ */ @Generated public class ExportResponse { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java index 6b3de442a..1d690b98c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java @@ -4,26 +4,14 @@ import com.databricks.sdk.support.Generated; -/** - * This specifies the format of the file to be imported. - * - *

The value is case sensitive. - * - *

- `AUTO`: The item is imported depending on an analysis of the item's extension and the header - * content provided in the request. If the item is imported as a notebook, then the item's extension - * is automatically removed. - `SOURCE`: The notebook or directory is imported as source code. - - * `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a - * Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive format. - * Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format. - */ +/** The format for workspace import and export. */ @Generated public enum ImportFormat { - AUTO, // The item is imported depending on an analysis of the item's extension and - DBC, // The notebook is imported in archive format. Required for - // directories. - HTML, // The notebook is imported as an HTML file. - JUPYTER, // The notebook is imported as a Jupyter/IPython Notebook file. + AUTO, + DBC, + HTML, + JUPYTER, RAW, - R_MARKDOWN, // The notebook is imported from R Markdown format. - SOURCE, // The notebook or directory is imported as source code. + R_MARKDOWN, + SOURCE, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Language.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Language.java index 264c51609..6642f348b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Language.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Language.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** The language of the object. This value is set only if the object type is `NOTEBOOK`. */ +/** The language of notebook. 
*/ @Generated public enum Language { PYTHON, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java index 89417921e..77643ed12 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java @@ -7,13 +7,16 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * The information of the object in workspace. It will be returned by ``list`` and ``get-status``. + */ @Generated public class ObjectInfo { /** Only applicable to files. The creation UTC timestamp. */ @JsonProperty("created_at") private Long createdAt; - /** The language of the object. This value is set only if the object type is `NOTEBOOK`. */ + /** The language of the object. This value is set only if the object type is ``NOTEBOOK``. */ @JsonProperty("language") private Language language; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectType.java index 22888b7a5..452f5d2d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectType.java @@ -4,19 +4,13 @@ import com.databricks.sdk.support.Generated; -/** - * The type of the object in workspace. - * - *

- `NOTEBOOK`: document that contains runnable code, visualizations, and explanatory text. - - * `DIRECTORY`: directory - `LIBRARY`: library - `FILE`: file - `REPO`: repository - `DASHBOARD`: - * Lakeview dashboard - */ +/** The type of the object in workspace. */ @Generated public enum ObjectType { - DASHBOARD, // Lakeview dashboard - DIRECTORY, // directory - FILE, // file - LIBRARY, // library - NOTEBOOK, // document that contains runnable code, visualizations, and explanatory text. - REPO, // repository + DASHBOARD, + DIRECTORY, + FILE, + LIBRARY, + NOTEBOOK, + REPO, } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jobs/JobsImplTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jobs/JobsImplTest.java deleted file mode 100644 index e3964f651..000000000 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jobs/JobsImplTest.java +++ /dev/null @@ -1,165 +0,0 @@ -package com.databricks.sdk.service.jobs; - -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.core.http.Request; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -public class JobsImplTest { - - /* - * API 2.1 pinned endpoints check - * See: https://databricks.atlassian.net/browse/JOBS-19304 - */ - - @Test - public void testJobsCreateUsesApi2_1() throws IOException { - ApiClient apiClient = Mockito.mock(ApiClient.class); - String expectedPath = "/api/2.1/jobs/create"; - when(apiClient.execute(any(), any())).thenReturn(null); - when(apiClient.serialize(any())).thenReturn(""); - - JobsService jobs = new JobsImpl(apiClient); - jobs.create(new CreateJob()); - - verify(apiClient) - .execute( - argThat( - (Request req) -> { - if (!req.getMethod().equals("POST")) { - return false; - } - if (!req.getUrl().equals(expectedPath)) { - return false; - } - return 
true; - }), - eq(CreateResponse.class)); - } - - @Test - public void testJobsGetUsesApi2_1() throws IOException { - ApiClient apiClient = Mockito.mock(ApiClient.class); - String expectedPath = "/api/2.1/jobs/get"; - when(apiClient.execute(any(), any())).thenReturn(null); - - JobsService jobs = new JobsImpl(apiClient); - jobs.get(new GetJobRequest()); - - verify(apiClient) - .execute( - argThat( - (Request req) -> { - if (!req.getMethod().equals("GET")) { - return false; - } - if (!req.getUrl().equals(expectedPath)) { - return false; - } - return true; - }), - eq(Job.class)); - } - - @Test - public void testJobsListUsesApi2_1() throws IOException { - ApiClient apiClient = Mockito.mock(ApiClient.class); - String expectedPath = "/api/2.1/jobs/list"; - when(apiClient.execute(any(), any())).thenReturn(null); - - JobsService jobs = new JobsImpl(apiClient); - jobs.list(new ListJobsRequest()); - - verify(apiClient) - .execute( - argThat( - (Request req) -> { - if (!req.getMethod().equals("GET")) { - return false; - } - if (!req.getUrl().equals(expectedPath)) { - return false; - } - return true; - }), - eq(ListJobsResponse.class)); - } - - @Test - public void testJobsUpdateUsesApi2_1() throws IOException { - ApiClient apiClient = Mockito.mock(ApiClient.class); - String expectedPath = "/api/2.1/jobs/update"; - when(apiClient.execute(any(), any())).thenReturn(null); - - JobsService jobs = new JobsImpl(apiClient); - jobs.update(new UpdateJob()); - - verify(apiClient) - .execute( - argThat( - (Request req) -> { - if (!req.getMethod().equals("POST")) { - return false; - } - if (!req.getUrl().equals(expectedPath)) { - return false; - } - return true; - }), - eq(UpdateResponse.class)); - } - - @Test - public void testJobsResetUsesApi2_1() throws IOException { - ApiClient apiClient = Mockito.mock(ApiClient.class); - String expectedPath = "/api/2.1/jobs/reset"; - when(apiClient.execute(any(), any())).thenReturn(null); - when(apiClient.serialize(any())).thenReturn(""); - - 
JobsService jobs = new JobsImpl(apiClient); - jobs.reset(new ResetJob()); - - verify(apiClient) - .execute( - argThat( - (Request req) -> { - if (!req.getMethod().equals("POST")) { - return false; - } - if (!req.getUrl().equals(expectedPath)) { - return false; - } - return true; - }), - eq(ResetResponse.class)); - } - - @Test - public void testJobsListRunsUsesApi2_1() throws IOException { - ApiClient apiClient = Mockito.mock(ApiClient.class); - String expectedPath = "/api/2.1/jobs/runs/list"; - when(apiClient.execute(any(), any())).thenReturn(null); - - JobsService jobs = new JobsImpl(apiClient); - jobs.listRuns(new ListRunsRequest()); - - verify(apiClient) - .execute( - argThat( - (Request req) -> { - if (!req.getMethod().equals("GET")) { - return false; - } - if (!req.getUrl().equals(expectedPath)) { - return false; - } - return true; - }), - eq(ListRunsResponse.class)); - } -} diff --git a/tagging.py b/tagging.py index c57621fb4..5504bdd0e 100644 --- a/tagging.py +++ b/tagging.py @@ -14,6 +14,7 @@ NEXT_CHANGELOG_FILE_NAME = "NEXT_CHANGELOG.md" CHANGELOG_FILE_NAME = "CHANGELOG.md" PACKAGE_FILE_NAME = ".package.json" +CODEGEN_FILE_NAME = ".codegen.json" """ This script tags the release of the SDKs using a combination of the GitHub API and Git commands. It reads the local repository to determine necessary changes, updates changelogs, and creates tags. @@ -153,14 +154,14 @@ def update_version_references(tag_info: TagInfo) -> None: Code references are defined in .package.json files. """ - # Load version patterns from '.package.json' file - package_file_path = os.path.join(os.getcwd(), tag_info.package.path, PACKAGE_FILE_NAME) + # Load version patterns from '.codegen.json' file at the top level of the repository + package_file_path = os.path.join(os.getcwd(), CODEGEN_FILE_NAME) with open(package_file_path, 'r') as file: package_file = json.load(file) version = package_file.get('version') if not version: - print(f"Version not found in .package.json. 
Nothing to update.") + print(f"`version` not found in .codegen.json. Nothing to update.") return # Update the versions