diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 864d90a5f..3e6708180 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -6b2dbf5489ec706709fed80ee65caed7d10a2f38 \ No newline at end of file +2cee201b2e8d656f7306b2f9ec98edfa721e9829 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index c27c40bc7..067333c32 100755 --- a/.gitattributes +++ b/.gitattributes @@ -45,6 +45,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSer databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpointServingEndpointPermission.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouseSqlWarehousePermission.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java linguist-generated=true @@ -170,10 +173,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredenti databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java linguist-generated=true @@ -282,6 +287,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLoc databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependency.java linguist-generated=true @@ -298,6 +304,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAP databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java linguist-generated=true @@ -541,6 +548,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoo databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaborator.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewNotebookReviewState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewNotebookReviewSubReason.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalogOutputCatalogStatus.java linguist-generated=true @@ -1436,6 +1446,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperiment databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionRequest.java linguist-generated=true @@ -1452,6 +1464,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentReq databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperiment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRequest.java linguist-generated=true @@ -1484,6 +1500,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperiment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperimentState.java linguist-generated=true @@ -1504,6 +1522,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingEx 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponse.java linguist-generated=true @@ -1525,6 +1545,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsReq databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListRegistryWebhooks.java linguist-generated=true @@ -1535,16 +1557,27 @@ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatch.java li databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputs.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParam.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModel.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelData.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameter.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTag.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Model.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricks.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInput.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java linguist-generated=true @@ -1583,6 +1616,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java l databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDataset.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsOrderBy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelVersionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsRequest.java linguist-generated=true @@ -1591,6 +1628,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagRequest.java linguist-generated=true @@ -1957,6 +1996,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpA databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicy.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsService.java linguist-generated=true @@ -1994,6 +2034,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAc databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfiguration.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponse.java linguist-generated=true @@ -2033,6 +2074,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmP databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteLlmProxyPartnerPoweredWorkspaceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationResponse.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRpcResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingResponse.java linguist-generated=true @@ -2065,6 +2108,15 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetw databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyRestrictionMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestination.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyRestrictionMode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestination.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressResourceType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java linguist-generated=true @@ -2118,6 +2170,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProx databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredEnforceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetLlmProxyPartnerPoweredWorkspaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNotificationDestinationRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPersonalComputeSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequest.java linguist-generated=true @@ -2126,6 +2179,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetStatusR databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenManagementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenPermissionLevelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetWorkspaceNetworkOptionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java linguist-generated=true @@ -2134,6 +2188,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAcce databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesResponse.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResult.java linguist-generated=true @@ -2166,6 +2222,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkCon databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPolicyEgress.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java linguist-generated=true @@ -2230,19 +2290,27 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmP databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationService.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkOption.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/AuthenticationType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java linguist-generated=true @@ -2252,18 +2320,22 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharin databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependency.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FederationPolicy.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfo.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfos.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientSharePermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetSharePermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetShareRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/IpAccessList.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderSharesResponse.java linguist-generated=true @@ -2274,6 +2346,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipie databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/NotebookFile.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/OidcFederationPolicy.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Partition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValue.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValueOp.java linguist-generated=true @@ -2288,6 +2361,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersSe databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesService.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientProfile.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfo.java linguist-generated=true @@ -2315,6 +2391,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesServi databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesSharedTableType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 2fff09714..57a5274a0 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -12,3 +12,28 @@ * Added support for .cloud.databricks.mil domains ([#435](https://github.com/databricks/databricks-sdk-java/pull/435)). ### API Changes +* Added `accountClient.networkPolicies()` service and `accountClient.workspaceNetworkConfiguration()` service. +* Added `workspaceClient.recipientFederationPolicies()` service. 
+* Added `createLoggedModel()`, `deleteLoggedModel()`, `deleteLoggedModelTag()`, `finalizeLoggedModel()`, `getLoggedModel()`, `listLoggedModelArtifacts()`, `logLoggedModelParams()`, `logOutputs()`, `searchLoggedModels()` and `setLoggedModelTags()` methods for `workspaceClient.experiments()` service. +* Added `ucSecurable` field for `com.databricks.sdk.service.apps.AppResource`. +* Added `timeseriesColumns` field for `com.databricks.sdk.service.catalog.PrimaryKeyConstraint`. +* Added `reviewState`, `reviews` and `runnerCollaboratorAliases` fields for `com.databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook`. +* Added `notebookEtag` and `notebookUpdatedAt` fields for `com.databricks.sdk.service.cleanrooms.CleanRoomNotebookTaskRun`. +* Added `rootPath` field for `com.databricks.sdk.service.pipelines.CreatePipeline`. +* Added `rootPath` field for `com.databricks.sdk.service.pipelines.EditPipeline`. +* Added `rootPath` field for `com.databricks.sdk.service.pipelines.PipelineSpec`. +* Added `materializationNamespace` field for `com.databricks.sdk.service.sharing.Table`. +* Added `omitPermissionsList` field for `com.databricks.sdk.service.sharing.UpdateSharePermissions`. +* Added `autoResolveDisplayName` field for `com.databricks.sdk.service.sql.UpdateAlertRequest`. +* Added `autoResolveDisplayName` field for `com.databricks.sdk.service.sql.UpdateQueryRequest`. +* Added `INTERNAL_CATALOG`, `MANAGED_ONLINE_CATALOG` and `UNKNOWN_CATALOG_TYPE` enum values for `com.databricks.sdk.service.catalog.CatalogType`. +* Added `CATALOG`, `CLEAN_ROOM`, `CONNECTION`, `CREDENTIAL`, `EXTERNAL_LOCATION`, `EXTERNAL_METADATA`, `FUNCTION`, `METASTORE`, `PIPELINE`, `PROVIDER`, `RECIPIENT`, `SCHEMA`, `SHARE`, `STAGING_TABLE`, `STORAGE_CREDENTIAL`, `TABLE`, `UNKNOWN_SECURABLE_TYPE` and `VOLUME` enum values for `com.databricks.sdk.service.catalog.SecurableType`. 
+* [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.CatalogInfo` to type `com.databricks.sdk.service.catalog.SecurableType` class. +* Changed `etag` and `name` fields for `com.databricks.sdk.service.iam.RuleSetResponse` to be required. +* Added `enableFileEvents` and `fileEventQueue` fields for `com.databricks.sdk.service.catalog.CreateExternalLocation`. +* Added `enableFileEvents` and `fileEventQueue` fields for `com.databricks.sdk.service.catalog.ExternalLocationInfo`. +* Added `enableFileEvents` and `fileEventQueue` fields for `com.databricks.sdk.service.catalog.UpdateExternalLocation`. +* Added `policyId` and `servicePrincipalId` fields for `com.databricks.sdk.service.oauth2.FederationPolicy`. +* [Breaking] Removed `accessPoint` field for `com.databricks.sdk.service.catalog.CreateExternalLocation`. +* [Breaking] Removed `accessPoint` field for `com.databricks.sdk.service.catalog.ExternalLocationInfo`. +* [Breaking] Removed `accessPoint` field for `com.databricks.sdk.service.catalog.UpdateExternalLocation`. 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java index 85597b682..1c813a589 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java @@ -65,6 +65,10 @@ import com.databricks.sdk.service.settings.AccountSettingsService; import com.databricks.sdk.service.settings.NetworkConnectivityAPI; import com.databricks.sdk.service.settings.NetworkConnectivityService; +import com.databricks.sdk.service.settings.NetworkPoliciesAPI; +import com.databricks.sdk.service.settings.NetworkPoliciesService; +import com.databricks.sdk.service.settings.WorkspaceNetworkConfigurationAPI; +import com.databricks.sdk.service.settings.WorkspaceNetworkConfigurationService; import com.databricks.sdk.support.Generated; /** Entry point for accessing Databricks account-level APIs */ @@ -86,6 +90,7 @@ public class AccountClient { private AccountMetastoreAssignmentsAPI metastoreAssignmentsAPI; private AccountMetastoresAPI metastoresAPI; private NetworkConnectivityAPI networkConnectivityAPI; + private NetworkPoliciesAPI networkPoliciesAPI; private NetworksAPI networksAPI; private OAuthPublishedAppsAPI oAuthPublishedAppsAPI; private PrivateAccessAPI privateAccessAPI; @@ -100,6 +105,7 @@ public class AccountClient { private AccountUsersAPI usersAPI; private VpcEndpointsAPI vpcEndpointsAPI; private WorkspaceAssignmentAPI workspaceAssignmentAPI; + private WorkspaceNetworkConfigurationAPI workspaceNetworkConfigurationAPI; private WorkspacesAPI workspacesAPI; private BudgetsAPI budgetsAPI; @@ -124,6 +130,7 @@ public AccountClient(DatabricksConfig config) { metastoreAssignmentsAPI = new AccountMetastoreAssignmentsAPI(apiClient); metastoresAPI = new AccountMetastoresAPI(apiClient); networkConnectivityAPI = new NetworkConnectivityAPI(apiClient); + networkPoliciesAPI = new 
NetworkPoliciesAPI(apiClient); networksAPI = new NetworksAPI(apiClient); oAuthPublishedAppsAPI = new OAuthPublishedAppsAPI(apiClient); privateAccessAPI = new PrivateAccessAPI(apiClient); @@ -138,6 +145,7 @@ public AccountClient(DatabricksConfig config) { usersAPI = new AccountUsersAPI(apiClient); vpcEndpointsAPI = new VpcEndpointsAPI(apiClient); workspaceAssignmentAPI = new WorkspaceAssignmentAPI(apiClient); + workspaceNetworkConfigurationAPI = new WorkspaceNetworkConfigurationAPI(apiClient); workspacesAPI = new WorkspacesAPI(apiClient); budgetsAPI = new BudgetsAPI(apiClient); } @@ -384,6 +392,18 @@ public NetworkConnectivityAPI networkConnectivity() { return networkConnectivityAPI; } + /** + * These APIs manage network policies for this account. Network policies control which network + * destinations can be accessed from the Databricks environment. Each Databricks account includes + * a default policy named 'default-policy'. 'default-policy' is associated with any workspace + * lacking an explicit network policy assignment, and is automatically associated with each newly + * created workspace. 'default-policy' is reserved and cannot be deleted, but it can be updated to + * customize the default network access rules for your account. + */ + public NetworkPoliciesAPI networkPolicies() { + return networkPoliciesAPI; + } + /** * These APIs manage network configurations for customer-managed VPCs (optional). Its ID is used * when creating a new workspace if you use customer-managed VPCs. @@ -553,6 +573,17 @@ public WorkspaceAssignmentAPI workspaceAssignment() { return workspaceAssignmentAPI; } + /** + * These APIs allow configuration of network settings for Databricks workspaces. Each workspace is + * always associated with exactly one network policy that controls which network destinations can + * be accessed from the Databricks environment. By default, workspaces are associated with the + * 'default-policy' network policy. 
You cannot create or delete a workspace's network + * configuration, only update it to associate the workspace with a different policy. + */ + public WorkspaceNetworkConfigurationAPI workspaceNetworkConfiguration() { + return workspaceNetworkConfigurationAPI; + } + /** * These APIs manage workspaces for this account. A Databricks workspace is an environment for * accessing all of your Databricks assets. The workspace organizes objects (notebooks, libraries, @@ -723,6 +754,17 @@ public AccountClient withNetworkConnectivityAPI(NetworkConnectivityAPI networkCo return this; } + /** Replace the default NetworkPoliciesService with a custom implementation. */ + public AccountClient withNetworkPoliciesImpl(NetworkPoliciesService networkPolicies) { + return this.withNetworkPoliciesAPI(new NetworkPoliciesAPI(networkPolicies)); + } + + /** Replace the default NetworkPoliciesAPI with a custom implementation. */ + public AccountClient withNetworkPoliciesAPI(NetworkPoliciesAPI networkPolicies) { + this.networkPoliciesAPI = networkPolicies; + return this; + } + /** Replace the default NetworksService with a custom implementation. */ public AccountClient withNetworksImpl(NetworksService networks) { return this.withNetworksAPI(new NetworksAPI(networks)); @@ -891,6 +933,20 @@ public AccountClient withWorkspaceAssignmentAPI(WorkspaceAssignmentAPI workspace return this; } + /** Replace the default WorkspaceNetworkConfigurationService with a custom implementation. */ + public AccountClient withWorkspaceNetworkConfigurationImpl( + WorkspaceNetworkConfigurationService workspaceNetworkConfiguration) { + return this.withWorkspaceNetworkConfigurationAPI( + new WorkspaceNetworkConfigurationAPI(workspaceNetworkConfiguration)); + } + + /** Replace the default WorkspaceNetworkConfigurationAPI with a custom implementation. 
*/ + public AccountClient withWorkspaceNetworkConfigurationAPI( + WorkspaceNetworkConfigurationAPI workspaceNetworkConfiguration) { + this.workspaceNetworkConfigurationAPI = workspaceNetworkConfiguration; + return this; + } + /** Replace the default WorkspacesService with a custom implementation. */ public AccountClient withWorkspacesImpl(WorkspacesService workspaces) { return this.withWorkspacesAPI(new WorkspacesAPI(workspaces)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index cc1c270b5..f6ae23552 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -160,6 +160,8 @@ import com.databricks.sdk.service.sharing.ProvidersService; import com.databricks.sdk.service.sharing.RecipientActivationAPI; import com.databricks.sdk.service.sharing.RecipientActivationService; +import com.databricks.sdk.service.sharing.RecipientFederationPoliciesAPI; +import com.databricks.sdk.service.sharing.RecipientFederationPoliciesService; import com.databricks.sdk.service.sharing.RecipientsAPI; import com.databricks.sdk.service.sharing.RecipientsService; import com.databricks.sdk.service.sharing.SharesAPI; @@ -285,6 +287,7 @@ public class WorkspaceClient { private QueryVisualizationsAPI queryVisualizationsAPI; private QueryVisualizationsLegacyAPI queryVisualizationsLegacyAPI; private RecipientActivationAPI recipientActivationAPI; + private RecipientFederationPoliciesAPI recipientFederationPoliciesAPI; private RecipientsAPI recipientsAPI; private RedashConfigAPI redashConfigAPI; private RegisteredModelsAPI registeredModelsAPI; @@ -394,6 +397,7 @@ public WorkspaceClient(DatabricksConfig config) { queryVisualizationsAPI = new QueryVisualizationsAPI(apiClient); queryVisualizationsLegacyAPI = new QueryVisualizationsLegacyAPI(apiClient); recipientActivationAPI = new 
RecipientActivationAPI(apiClient); + recipientFederationPoliciesAPI = new RecipientFederationPoliciesAPI(apiClient); recipientsAPI = new RecipientsAPI(apiClient); redashConfigAPI = new RedashConfigAPI(apiClient); registeredModelsAPI = new RegisteredModelsAPI(apiClient); @@ -444,7 +448,7 @@ public AccessControlAPI accessControl() { * These APIs manage access rules on resources in an account. Currently, only grant rules are * supported. A grant rule specifies a role assigned to a set of principals. A list of rules * attached to a resource is called a rule set. A workspace must belong to an account for these - * APIs to work. + * APIs to work */ public AccountAccessControlProxyAPI accountAccessControlProxy() { return accountAccessControlProxyAPI; @@ -1289,6 +1293,33 @@ public RecipientActivationAPI recipientActivation() { return recipientActivationAPI; } + /** + * The Recipient Federation Policies APIs are only applicable in the open sharing model where the + * recipient object has the authentication type of `OIDC_RECIPIENT`, enabling data sharing from + * Databricks to non-Databricks recipients. OIDC Token Federation enables secure, secret-less + * authentication for accessing Delta Sharing servers. Users and applications authenticate using + * short-lived OIDC tokens issued by their own Identity Provider (IdP), such as Azure Entra ID or + * Okta, without the need for managing static credentials or client secrets. A federation policy + * defines how non-Databricks recipients authenticate using OIDC tokens. It validates the OIDC + * claims in federated tokens and is set at the recipient level. The caller must be the owner of + * the recipient to create or manage a federation policy. Federation policies support the + * following scenarios: - User-to-Machine (U2M) flow: A user accesses Delta Shares using their own + * identity, such as connecting through PowerBI Delta Sharing Client. 
- Machine-to-Machine (M2M) + * flow: An application accesses Delta Shares using its own identity, typically for automation + * tasks like nightly jobs through Python Delta Sharing Client. OIDC Token Federation enables + * fine-grained access control, supports Multi-Factor Authentication (MFA), and enhances security + * by minimizing the risk of credential leakage through the use of short-lived, expiring tokens. + * It is designed for strong identity governance, secure cross-platform data sharing, and reduced + * operational overhead for credential management. + * + *

For more information, see + * https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security + * and https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed + */ + public RecipientFederationPoliciesAPI recipientFederationPolicies() { + return recipientFederationPoliciesAPI; + } + /** * A recipient is an object you create using :method:recipients/create to represent an * organization which you want to allow access shares. The way how sharing works differs depending @@ -2557,6 +2588,20 @@ public WorkspaceClient withRecipientActivationAPI(RecipientActivationAPI recipie return this; } + /** Replace the default RecipientFederationPoliciesService with a custom implementation. */ + public WorkspaceClient withRecipientFederationPoliciesImpl( + RecipientFederationPoliciesService recipientFederationPolicies) { + return this.withRecipientFederationPoliciesAPI( + new RecipientFederationPoliciesAPI(recipientFederationPolicies)); + } + + /** Replace the default RecipientFederationPoliciesAPI with a custom implementation. */ + public WorkspaceClient withRecipientFederationPoliciesAPI( + RecipientFederationPoliciesAPI recipientFederationPolicies) { + this.recipientFederationPoliciesAPI = recipientFederationPolicies; + return this; + } + /** Replace the default RecipientsService with a custom implementation. 
*/ public WorkspaceClient withRecipientsImpl(RecipientsService recipients) { return this.withRecipientsAPI(new RecipientsAPI(recipients)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java index a0728e385..727e187f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java @@ -33,6 +33,10 @@ public class AppResource { @JsonProperty("sql_warehouse") private AppResourceSqlWarehouse sqlWarehouse; + /** */ + @JsonProperty("uc_securable") + private AppResourceUcSecurable ucSecurable; + public AppResource setDescription(String description) { this.description = description; return this; @@ -87,6 +91,15 @@ public AppResourceSqlWarehouse getSqlWarehouse() { return sqlWarehouse; } + public AppResource setUcSecurable(AppResourceUcSecurable ucSecurable) { + this.ucSecurable = ucSecurable; + return this; + } + + public AppResourceUcSecurable getUcSecurable() { + return ucSecurable; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -97,12 +110,13 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(secret, that.secret) && Objects.equals(servingEndpoint, that.servingEndpoint) - && Objects.equals(sqlWarehouse, that.sqlWarehouse); + && Objects.equals(sqlWarehouse, that.sqlWarehouse) + && Objects.equals(ucSecurable, that.ucSecurable); } @Override public int hashCode() { - return Objects.hash(description, job, name, secret, servingEndpoint, sqlWarehouse); + return Objects.hash(description, job, name, secret, servingEndpoint, sqlWarehouse, ucSecurable); } @Override @@ -114,6 +128,7 @@ public String toString() { .add("secret", secret) .add("servingEndpoint", servingEndpoint) .add("sqlWarehouse", sqlWarehouse) + .add("ucSecurable", ucSecurable) .toString(); } } diff 
--git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurable.java new file mode 100755 index 000000000..8944df179 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurable.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppResourceUcSecurable { + /** */ + @JsonProperty("permission") + private AppResourceUcSecurableUcSecurablePermission permission; + + /** */ + @JsonProperty("securable_full_name") + private String securableFullName; + + /** */ + @JsonProperty("securable_type") + private AppResourceUcSecurableUcSecurableType securableType; + + public AppResourceUcSecurable setPermission( + AppResourceUcSecurableUcSecurablePermission permission) { + this.permission = permission; + return this; + } + + public AppResourceUcSecurableUcSecurablePermission getPermission() { + return permission; + } + + public AppResourceUcSecurable setSecurableFullName(String securableFullName) { + this.securableFullName = securableFullName; + return this; + } + + public String getSecurableFullName() { + return securableFullName; + } + + public AppResourceUcSecurable setSecurableType( + AppResourceUcSecurableUcSecurableType securableType) { + this.securableType = securableType; + return this; + } + + public AppResourceUcSecurableUcSecurableType getSecurableType() { + return securableType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceUcSecurable that = (AppResourceUcSecurable) o; + return 
Objects.equals(permission, that.permission) + && Objects.equals(securableFullName, that.securableFullName) + && Objects.equals(securableType, that.securableType); + } + + @Override + public int hashCode() { + return Objects.hash(permission, securableFullName, securableType); + } + + @Override + public String toString() { + return new ToStringer(AppResourceUcSecurable.class) + .add("permission", permission) + .add("securableFullName", securableFullName) + .add("securableType", securableType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java new file mode 100755 index 000000000..551de2d21 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppResourceUcSecurableUcSecurablePermission { + READ_VOLUME, + WRITE_VOLUME, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java new file mode 100755 index 000000000..b05b5435e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java @@ -0,0 +1,10 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppResourceUcSecurableUcSecurableType { + VOLUME, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java new file mode 100755 index 000000000..ebf035131 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AwsSqsQueue { + /** Unique identifier included in the name of file events managed cloud resources. */ + @JsonProperty("managed_resource_id") + private String managedResourceId; + + /** + * The AQS queue url in the format https://sqs.{region}.amazonaws.com/{account id}/{queue name} + * REQUIRED for provided_sqs. 
+ */ + @JsonProperty("queue_url") + private String queueUrl; + + public AwsSqsQueue setManagedResourceId(String managedResourceId) { + this.managedResourceId = managedResourceId; + return this; + } + + public String getManagedResourceId() { + return managedResourceId; + } + + public AwsSqsQueue setQueueUrl(String queueUrl) { + this.queueUrl = queueUrl; + return this; + } + + public String getQueueUrl() { + return queueUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsSqsQueue that = (AwsSqsQueue) o; + return Objects.equals(managedResourceId, that.managedResourceId) + && Objects.equals(queueUrl, that.queueUrl); + } + + @Override + public int hashCode() { + return Objects.hash(managedResourceId, queueUrl); + } + + @Override + public String toString() { + return new ToStringer(AwsSqsQueue.class) + .add("managedResourceId", managedResourceId) + .add("queueUrl", queueUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java new file mode 100755 index 000000000..060de1960 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java @@ -0,0 +1,98 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AzureQueueStorage { + /** Unique identifier included in the name of file events managed cloud resources. 
*/ + @JsonProperty("managed_resource_id") + private String managedResourceId; + + /** + * The AQS queue url in the format https://{storage account}.queue.core.windows.net/{queue name} + * REQUIRED for provided_aqs. + */ + @JsonProperty("queue_url") + private String queueUrl; + + /** + * The resource group for the queue, event grid subscription, and external location storage + * account. ONLY REQUIRED for locations with a service principal storage credential + */ + @JsonProperty("resource_group") + private String resourceGroup; + + /** + * OPTIONAL: The subscription id for the queue, event grid subscription, and external location + * storage account. REQUIRED for locations with a service principal storage credential + */ + @JsonProperty("subscription_id") + private String subscriptionId; + + public AzureQueueStorage setManagedResourceId(String managedResourceId) { + this.managedResourceId = managedResourceId; + return this; + } + + public String getManagedResourceId() { + return managedResourceId; + } + + public AzureQueueStorage setQueueUrl(String queueUrl) { + this.queueUrl = queueUrl; + return this; + } + + public String getQueueUrl() { + return queueUrl; + } + + public AzureQueueStorage setResourceGroup(String resourceGroup) { + this.resourceGroup = resourceGroup; + return this; + } + + public String getResourceGroup() { + return resourceGroup; + } + + public AzureQueueStorage setSubscriptionId(String subscriptionId) { + this.subscriptionId = subscriptionId; + return this; + } + + public String getSubscriptionId() { + return subscriptionId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureQueueStorage that = (AzureQueueStorage) o; + return Objects.equals(managedResourceId, that.managedResourceId) + && Objects.equals(queueUrl, that.queueUrl) + && Objects.equals(resourceGroup, that.resourceGroup) + && Objects.equals(subscriptionId, that.subscriptionId); + } + + 
@Override + public int hashCode() { + return Objects.hash(managedResourceId, queueUrl, resourceGroup, subscriptionId); + } + + @Override + public String toString() { + return new ToStringer(AzureQueueStorage.class) + .add("managedResourceId", managedResourceId) + .add("queueUrl", queueUrl) + .add("resourceGroup", resourceGroup) + .add("subscriptionId", subscriptionId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java index 82be0cd83..ccdc57264 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java @@ -89,9 +89,9 @@ public class CatalogInfo { @JsonProperty("provisioning_info") private ProvisioningInfo provisioningInfo; - /** */ + /** The type of Unity Catalog securable. */ @JsonProperty("securable_type") - private String securableType; + private SecurableType securableType; /** The name of the share under the share provider. 
*/ @JsonProperty("share_name") @@ -268,12 +268,12 @@ public ProvisioningInfo getProvisioningInfo() { return provisioningInfo; } - public CatalogInfo setSecurableType(String securableType) { + public CatalogInfo setSecurableType(SecurableType securableType) { this.securableType = securableType; return this; } - public String getSecurableType() { + public SecurableType getSecurableType() { return securableType; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogIsolationMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogIsolationMode.java index 04a2037e4..cd8bb5e3c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogIsolationMode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogIsolationMode.java @@ -4,9 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * Whether the current securable is accessible from all workspaces or a specific set of workspaces. 
- */ @Generated public enum CatalogIsolationMode { ISOLATED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java index 47f4d0ce0..992b920ab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java @@ -9,6 +9,9 @@ public enum CatalogType { DELTASHARING_CATALOG, FOREIGN_CATALOG, + INTERNAL_CATALOG, MANAGED_CATALOG, + MANAGED_ONLINE_CATALOG, SYSTEM_CATALOG, + UNKNOWN_CATALOG_TYPE, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java index 33695c99d..744c57db6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java @@ -83,10 +83,6 @@ public CatalogInfo get(GetCatalogRequest request) { * specific ordering of the elements in the array. */ public Iterable list(ListCatalogsRequest request) { - - if (request.getMaxResults() == null) { - request.setMaxResults(0L); - } return new Paginator<>( request, impl::list, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java index d497ab880..585c5876d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java @@ -9,10 +9,6 @@ @Generated public class CreateExternalLocation { - /** The AWS access point to use when accesing s3 for this external location. 
*/ - @JsonProperty("access_point") - private String accessPoint; - /** User-provided free-form text description. */ @JsonProperty("comment") private String comment; @@ -21,6 +17,10 @@ public class CreateExternalLocation { @JsonProperty("credential_name") private String credentialName; + /** [Create:OPT Update:OPT] Whether to enable file events on this external location. */ + @JsonProperty("enable_file_events") + private Boolean enableFileEvents; + /** Encryption options that apply to clients connecting to cloud storage. */ @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; @@ -33,6 +33,10 @@ public class CreateExternalLocation { @JsonProperty("fallback") private Boolean fallback; + /** [Create:OPT Update:OPT] File event queue settings. */ + @JsonProperty("file_event_queue") + private FileEventQueue fileEventQueue; + /** Name of the external location. */ @JsonProperty("name") private String name; @@ -49,15 +53,6 @@ public class CreateExternalLocation { @JsonProperty("url") private String url; - public CreateExternalLocation setAccessPoint(String accessPoint) { - this.accessPoint = accessPoint; - return this; - } - - public String getAccessPoint() { - return accessPoint; - } - public CreateExternalLocation setComment(String comment) { this.comment = comment; return this; @@ -76,6 +71,15 @@ public String getCredentialName() { return credentialName; } + public CreateExternalLocation setEnableFileEvents(Boolean enableFileEvents) { + this.enableFileEvents = enableFileEvents; + return this; + } + + public Boolean getEnableFileEvents() { + return enableFileEvents; + } + public CreateExternalLocation setEncryptionDetails(EncryptionDetails encryptionDetails) { this.encryptionDetails = encryptionDetails; return this; @@ -94,6 +98,15 @@ public Boolean getFallback() { return fallback; } + public CreateExternalLocation setFileEventQueue(FileEventQueue fileEventQueue) { + this.fileEventQueue = fileEventQueue; + return this; + } + + public 
FileEventQueue getFileEventQueue() { + return fileEventQueue; + } + public CreateExternalLocation setName(String name) { this.name = name; return this; @@ -135,11 +148,12 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateExternalLocation that = (CreateExternalLocation) o; - return Objects.equals(accessPoint, that.accessPoint) - && Objects.equals(comment, that.comment) + return Objects.equals(comment, that.comment) && Objects.equals(credentialName, that.credentialName) + && Objects.equals(enableFileEvents, that.enableFileEvents) && Objects.equals(encryptionDetails, that.encryptionDetails) && Objects.equals(fallback, that.fallback) + && Objects.equals(fileEventQueue, that.fileEventQueue) && Objects.equals(name, that.name) && Objects.equals(readOnly, that.readOnly) && Objects.equals(skipValidation, that.skipValidation) @@ -149,11 +163,12 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - accessPoint, comment, credentialName, + enableFileEvents, encryptionDetails, fallback, + fileEventQueue, name, readOnly, skipValidation, @@ -163,11 +178,12 @@ public int hashCode() { @Override public String toString() { return new ToStringer(CreateExternalLocation.class) - .add("accessPoint", accessPoint) .add("comment", comment) .add("credentialName", credentialName) + .add("enableFileEvents", enableFileEvents) .add("encryptionDetails", encryptionDetails) .add("fallback", fallback) + .add("fileEventQueue", fileEventQueue) .add("name", name) .add("readOnly", readOnly) .add("skipValidation", skipValidation) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredictiveOptimization.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredictiveOptimization.java index 801f54d44..3cef5c8e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredictiveOptimization.java 
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredictiveOptimization.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** Whether predictive optimization should be enabled for this object and objects under it. */ @Generated public enum EnablePredictiveOptimization { DISABLE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java index 3fe7a3650..b8a003e2b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java @@ -9,10 +9,6 @@ @Generated public class ExternalLocationInfo { - /** The AWS access point to use when accesing s3 for this external location. */ - @JsonProperty("access_point") - private String accessPoint; - /** * Indicates whether the principal is limited to retrieving metadata for the associated object * through the BROWSE privilege when include_browse is enabled in the request. @@ -40,6 +36,10 @@ public class ExternalLocationInfo { @JsonProperty("credential_name") private String credentialName; + /** [Create:OPT Update:OPT] Whether to enable file events on this external location. */ + @JsonProperty("enable_file_events") + private Boolean enableFileEvents; + /** Encryption options that apply to clients connecting to cloud storage. */ @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; @@ -52,6 +52,10 @@ public class ExternalLocationInfo { @JsonProperty("fallback") private Boolean fallback; + /** [Create:OPT Update:OPT] File event queue settings. 
*/ + @JsonProperty("file_event_queue") + private FileEventQueue fileEventQueue; + /** */ @JsonProperty("isolation_mode") private IsolationMode isolationMode; @@ -84,15 +88,6 @@ public class ExternalLocationInfo { @JsonProperty("url") private String url; - public ExternalLocationInfo setAccessPoint(String accessPoint) { - this.accessPoint = accessPoint; - return this; - } - - public String getAccessPoint() { - return accessPoint; - } - public ExternalLocationInfo setBrowseOnly(Boolean browseOnly) { this.browseOnly = browseOnly; return this; @@ -147,6 +142,15 @@ public String getCredentialName() { return credentialName; } + public ExternalLocationInfo setEnableFileEvents(Boolean enableFileEvents) { + this.enableFileEvents = enableFileEvents; + return this; + } + + public Boolean getEnableFileEvents() { + return enableFileEvents; + } + public ExternalLocationInfo setEncryptionDetails(EncryptionDetails encryptionDetails) { this.encryptionDetails = encryptionDetails; return this; @@ -165,6 +169,15 @@ public Boolean getFallback() { return fallback; } + public ExternalLocationInfo setFileEventQueue(FileEventQueue fileEventQueue) { + this.fileEventQueue = fileEventQueue; + return this; + } + + public FileEventQueue getFileEventQueue() { + return fileEventQueue; + } + public ExternalLocationInfo setIsolationMode(IsolationMode isolationMode) { this.isolationMode = isolationMode; return this; @@ -242,15 +255,16 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ExternalLocationInfo that = (ExternalLocationInfo) o; - return Objects.equals(accessPoint, that.accessPoint) - && Objects.equals(browseOnly, that.browseOnly) + return Objects.equals(browseOnly, that.browseOnly) && Objects.equals(comment, that.comment) && Objects.equals(createdAt, that.createdAt) && Objects.equals(createdBy, that.createdBy) && Objects.equals(credentialId, that.credentialId) && Objects.equals(credentialName, that.credentialName) + 
&& Objects.equals(enableFileEvents, that.enableFileEvents) && Objects.equals(encryptionDetails, that.encryptionDetails) && Objects.equals(fallback, that.fallback) + && Objects.equals(fileEventQueue, that.fileEventQueue) && Objects.equals(isolationMode, that.isolationMode) && Objects.equals(metastoreId, that.metastoreId) && Objects.equals(name, that.name) @@ -264,15 +278,16 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - accessPoint, browseOnly, comment, createdAt, createdBy, credentialId, credentialName, + enableFileEvents, encryptionDetails, fallback, + fileEventQueue, isolationMode, metastoreId, name, @@ -286,15 +301,16 @@ public int hashCode() { @Override public String toString() { return new ToStringer(ExternalLocationInfo.class) - .add("accessPoint", accessPoint) .add("browseOnly", browseOnly) .add("comment", comment) .add("createdAt", createdAt) .add("createdBy", createdBy) .add("credentialId", credentialId) .add("credentialName", credentialName) + .add("enableFileEvents", enableFileEvents) .add("encryptionDetails", encryptionDetails) .add("fallback", fallback) + .add("fileEventQueue", fileEventQueue) .add("isolationMode", isolationMode) .add("metastoreId", metastoreId) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java index 0edf3c3eb..a5c5d698e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java @@ -88,10 +88,6 @@ public ExternalLocationInfo get(GetExternalLocationRequest request) { * elements in the array. 
*/ public Iterable list(ListExternalLocationsRequest request) { - - if (request.getMaxResults() == null) { - request.setMaxResults(0L); - } return new Paginator<>( request, impl::list, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java new file mode 100755 index 000000000..0fd645462 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java @@ -0,0 +1,120 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class FileEventQueue { + /** */ + @JsonProperty("managed_aqs") + private AzureQueueStorage managedAqs; + + /** */ + @JsonProperty("managed_pubsub") + private GcpPubsub managedPubsub; + + /** */ + @JsonProperty("managed_sqs") + private AwsSqsQueue managedSqs; + + /** */ + @JsonProperty("provided_aqs") + private AzureQueueStorage providedAqs; + + /** */ + @JsonProperty("provided_pubsub") + private GcpPubsub providedPubsub; + + /** */ + @JsonProperty("provided_sqs") + private AwsSqsQueue providedSqs; + + public FileEventQueue setManagedAqs(AzureQueueStorage managedAqs) { + this.managedAqs = managedAqs; + return this; + } + + public AzureQueueStorage getManagedAqs() { + return managedAqs; + } + + public FileEventQueue setManagedPubsub(GcpPubsub managedPubsub) { + this.managedPubsub = managedPubsub; + return this; + } + + public GcpPubsub getManagedPubsub() { + return managedPubsub; + } + + public FileEventQueue setManagedSqs(AwsSqsQueue managedSqs) { + this.managedSqs = managedSqs; + return this; + } + + public AwsSqsQueue getManagedSqs() { + return managedSqs; + } + + public FileEventQueue 
setProvidedAqs(AzureQueueStorage providedAqs) { + this.providedAqs = providedAqs; + return this; + } + + public AzureQueueStorage getProvidedAqs() { + return providedAqs; + } + + public FileEventQueue setProvidedPubsub(GcpPubsub providedPubsub) { + this.providedPubsub = providedPubsub; + return this; + } + + public GcpPubsub getProvidedPubsub() { + return providedPubsub; + } + + public FileEventQueue setProvidedSqs(AwsSqsQueue providedSqs) { + this.providedSqs = providedSqs; + return this; + } + + public AwsSqsQueue getProvidedSqs() { + return providedSqs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileEventQueue that = (FileEventQueue) o; + return Objects.equals(managedAqs, that.managedAqs) + && Objects.equals(managedPubsub, that.managedPubsub) + && Objects.equals(managedSqs, that.managedSqs) + && Objects.equals(providedAqs, that.providedAqs) + && Objects.equals(providedPubsub, that.providedPubsub) + && Objects.equals(providedSqs, that.providedSqs); + } + + @Override + public int hashCode() { + return Objects.hash( + managedAqs, managedPubsub, managedSqs, providedAqs, providedPubsub, providedSqs); + } + + @Override + public String toString() { + return new ToStringer(FileEventQueue.class) + .add("managedAqs", managedAqs) + .add("managedPubsub", managedPubsub) + .add("managedSqs", managedSqs) + .add("providedAqs", providedAqs) + .add("providedPubsub", providedPubsub) + .add("providedSqs", providedSqs) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java new file mode 100755 index 000000000..b834a9ff0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GcpPubsub { + /** Unique identifier included in the name of file events managed cloud resources. */ + @JsonProperty("managed_resource_id") + private String managedResourceId; + + /** + * The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription + * name} REQUIRED for provided_pubsub. + */ + @JsonProperty("subscription_name") + private String subscriptionName; + + public GcpPubsub setManagedResourceId(String managedResourceId) { + this.managedResourceId = managedResourceId; + return this; + } + + public String getManagedResourceId() { + return managedResourceId; + } + + public GcpPubsub setSubscriptionName(String subscriptionName) { + this.subscriptionName = subscriptionName; + return this; + } + + public String getSubscriptionName() { + return subscriptionName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcpPubsub that = (GcpPubsub) o; + return Objects.equals(managedResourceId, that.managedResourceId) + && Objects.equals(subscriptionName, that.subscriptionName); + } + + @Override + public int hashCode() { + return Objects.hash(managedResourceId, subscriptionName); + } + + @Override + public String toString() { + return new ToStringer(GcpPubsub.class) + .add("managedResourceId", managedResourceId) + .add("subscriptionName", subscriptionName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java index 093331195..5d13fbbd4 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java @@ -18,6 +18,10 @@ public class PrimaryKeyConstraint { @JsonProperty("name") private String name; + /** Column names that represent a timeseries. */ + @JsonProperty("timeseries_columns") + private Collection timeseriesColumns; + public PrimaryKeyConstraint setChildColumns(Collection childColumns) { this.childColumns = childColumns; return this; @@ -36,17 +40,28 @@ public String getName() { return name; } + public PrimaryKeyConstraint setTimeseriesColumns(Collection timeseriesColumns) { + this.timeseriesColumns = timeseriesColumns; + return this; + } + + public Collection getTimeseriesColumns() { + return timeseriesColumns; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PrimaryKeyConstraint that = (PrimaryKeyConstraint) o; - return Objects.equals(childColumns, that.childColumns) && Objects.equals(name, that.name); + return Objects.equals(childColumns, that.childColumns) + && Objects.equals(name, that.name) + && Objects.equals(timeseriesColumns, that.timeseriesColumns); } @Override public int hashCode() { - return Objects.hash(childColumns, name); + return Objects.hash(childColumns, name, timeseriesColumns); } @Override @@ -54,6 +69,7 @@ public String toString() { return new ToStringer(PrimaryKeyConstraint.class) .add("childColumns", childColumns) .add("name", name) + .add("timeseriesColumns", timeseriesColumns) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfo.java index eb146ab62..d124c65b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfo.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfo.java @@ -10,7 +10,7 @@ /** Status of an asynchronously provisioned resource. */ @Generated public class ProvisioningInfo { - /** */ + /** The provisioning state of the resource. */ @JsonProperty("state") private ProvisioningInfoState state; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java index dc899512e..9358ca802 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java @@ -41,7 +41,7 @@ public class SchemaInfo { @JsonProperty("effective_predictive_optimization_flag") private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; - /** Whether predictive optimization should be enabled for this object and objects under it. */ + /** */ @JsonProperty("enable_predictive_optimization") private EnablePredictiveOptimization enablePredictiveOptimization; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java index 1ad9e6fbd..eadc6dfa3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** The type of Unity Catalog securable */ +/** The type of Unity Catalog securable. 
*/ @Generated public enum SecurableType { CATALOG, @@ -12,6 +12,7 @@ public enum SecurableType { CONNECTION, CREDENTIAL, EXTERNAL_LOCATION, + EXTERNAL_METADATA, FUNCTION, METASTORE, PIPELINE, @@ -19,7 +20,9 @@ public enum SecurableType { RECIPIENT, SCHEMA, SHARE, + STAGING_TABLE, STORAGE_CREDENTIAL, TABLE, + UNKNOWN_SECURABLE_TYPE, VOLUME, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetails.java index dbcfab688..d0325a25d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetails.java @@ -10,11 +10,14 @@ /** Server-Side Encryption properties for clients communicating with AWS s3. */ @Generated public class SseEncryptionDetails { - /** The type of key encryption to use (affects headers from s3 client). */ + /** Sets the value of the 'x-amz-server-side-encryption' header in S3 request. */ @JsonProperty("algorithm") private SseEncryptionDetailsAlgorithm algorithm; - /** When algorithm is **AWS_SSE_KMS** this field specifies the ARN of the SSE key to use. */ + /** + * Optional. The ARN of the SSE-KMS key used with the S3 location, when algorithm = "SSE-KMS". + * Sets the value of the 'x-amz-server-side-encryption-aws-kms-key-id' header. 
+ */ @JsonProperty("aws_kms_key_arn") private String awsKmsKeyArn; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetailsAlgorithm.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetailsAlgorithm.java index 50c6ee445..043bd704d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetailsAlgorithm.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetailsAlgorithm.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** The type of key encryption to use (affects headers from s3 client). */ @Generated public enum SseEncryptionDetailsAlgorithm { AWS_SSE_KMS, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java index 7208948cb..94c4d8abf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java @@ -65,7 +65,7 @@ public class TableInfo { @JsonProperty("effective_predictive_optimization_flag") private EffectivePredictiveOptimizationFlag effectivePredictiveOptimizationFlag; - /** Whether predictive optimization should be enabled for this object and objects under it. 
*/ + /** */ @JsonProperty("enable_predictive_optimization") private EnablePredictiveOptimization enablePredictiveOptimization; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java index c4017fb49..d2a759d9f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java @@ -10,10 +10,6 @@ @Generated public class UpdateExternalLocation { - /** The AWS access point to use when accesing s3 for this external location. */ - @JsonProperty("access_point") - private String accessPoint; - /** User-provided free-form text description. */ @JsonProperty("comment") private String comment; @@ -22,6 +18,10 @@ public class UpdateExternalLocation { @JsonProperty("credential_name") private String credentialName; + /** [Create:OPT Update:OPT] Whether to enable file events on this external location. */ + @JsonProperty("enable_file_events") + private Boolean enableFileEvents; + /** Encryption options that apply to clients connecting to cloud storage. */ @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; @@ -34,6 +34,10 @@ public class UpdateExternalLocation { @JsonProperty("fallback") private Boolean fallback; + /** [Create:OPT Update:OPT] File event queue settings. */ + @JsonProperty("file_event_queue") + private FileEventQueue fileEventQueue; + /** Force update even if changing url invalidates dependent external tables or mounts. 
*/ @JsonProperty("force") private Boolean force; @@ -65,15 +69,6 @@ public class UpdateExternalLocation { @JsonProperty("url") private String url; - public UpdateExternalLocation setAccessPoint(String accessPoint) { - this.accessPoint = accessPoint; - return this; - } - - public String getAccessPoint() { - return accessPoint; - } - public UpdateExternalLocation setComment(String comment) { this.comment = comment; return this; @@ -92,6 +87,15 @@ public String getCredentialName() { return credentialName; } + public UpdateExternalLocation setEnableFileEvents(Boolean enableFileEvents) { + this.enableFileEvents = enableFileEvents; + return this; + } + + public Boolean getEnableFileEvents() { + return enableFileEvents; + } + public UpdateExternalLocation setEncryptionDetails(EncryptionDetails encryptionDetails) { this.encryptionDetails = encryptionDetails; return this; @@ -110,6 +114,15 @@ public Boolean getFallback() { return fallback; } + public UpdateExternalLocation setFileEventQueue(FileEventQueue fileEventQueue) { + this.fileEventQueue = fileEventQueue; + return this; + } + + public FileEventQueue getFileEventQueue() { + return fileEventQueue; + } + public UpdateExternalLocation setForce(Boolean force) { this.force = force; return this; @@ -187,11 +200,12 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateExternalLocation that = (UpdateExternalLocation) o; - return Objects.equals(accessPoint, that.accessPoint) - && Objects.equals(comment, that.comment) + return Objects.equals(comment, that.comment) && Objects.equals(credentialName, that.credentialName) + && Objects.equals(enableFileEvents, that.enableFileEvents) && Objects.equals(encryptionDetails, that.encryptionDetails) && Objects.equals(fallback, that.fallback) + && Objects.equals(fileEventQueue, that.fileEventQueue) && Objects.equals(force, that.force) && Objects.equals(isolationMode, that.isolationMode) && Objects.equals(name, 
that.name) @@ -205,11 +219,12 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - accessPoint, comment, credentialName, + enableFileEvents, encryptionDetails, fallback, + fileEventQueue, force, isolationMode, name, @@ -223,11 +238,12 @@ public int hashCode() { @Override public String toString() { return new ToStringer(UpdateExternalLocation.class) - .add("accessPoint", accessPoint) .add("comment", comment) .add("credentialName", credentialName) + .add("enableFileEvents", enableFileEvents) .add("encryptionDetails", encryptionDetails) .add("fallback", fallback) + .add("fileEventQueue", fileEventQueue) .add("force", force) .add("isolationMode", isolationMode) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java index 5c910b1e7..0cb434d26 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java @@ -15,7 +15,7 @@ public class UpdateSchema { @JsonProperty("comment") private String comment; - /** Whether predictive optimization should be enabled for this object and objects under it. 
*/ + /** */ @JsonProperty("enable_predictive_optimization") private EnablePredictiveOptimization enablePredictiveOptimization; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java index 097cb43c9..299fdd186 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java @@ -5,6 +5,7 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated @@ -20,6 +21,18 @@ public class CleanRoomAssetNotebook { @JsonProperty("notebook_content") private String notebookContent; + /** top-level status derived from all reviews */ + @JsonProperty("review_state") + private CleanRoomNotebookReviewNotebookReviewState reviewState; + + /** All existing approvals or rejections */ + @JsonProperty("reviews") + private Collection reviews; + + /** collaborators that can run the notebook */ + @JsonProperty("runner_collaborator_aliases") + private Collection runnerCollaboratorAliases; + public CleanRoomAssetNotebook setEtag(String etag) { this.etag = etag; return this; @@ -38,17 +51,50 @@ public String getNotebookContent() { return notebookContent; } + public CleanRoomAssetNotebook setReviewState( + CleanRoomNotebookReviewNotebookReviewState reviewState) { + this.reviewState = reviewState; + return this; + } + + public CleanRoomNotebookReviewNotebookReviewState getReviewState() { + return reviewState; + } + + public CleanRoomAssetNotebook setReviews(Collection reviews) { + this.reviews = reviews; + return this; + } + + public Collection getReviews() { + return reviews; + } + + public CleanRoomAssetNotebook 
setRunnerCollaboratorAliases( + Collection runnerCollaboratorAliases) { + this.runnerCollaboratorAliases = runnerCollaboratorAliases; + return this; + } + + public Collection getRunnerCollaboratorAliases() { + return runnerCollaboratorAliases; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CleanRoomAssetNotebook that = (CleanRoomAssetNotebook) o; - return Objects.equals(etag, that.etag) && Objects.equals(notebookContent, that.notebookContent); + return Objects.equals(etag, that.etag) + && Objects.equals(notebookContent, that.notebookContent) + && Objects.equals(reviewState, that.reviewState) + && Objects.equals(reviews, that.reviews) + && Objects.equals(runnerCollaboratorAliases, that.runnerCollaboratorAliases); } @Override public int hashCode() { - return Objects.hash(etag, notebookContent); + return Objects.hash(etag, notebookContent, reviewState, reviews, runnerCollaboratorAliases); } @Override @@ -56,6 +102,9 @@ public String toString() { return new ToStringer(CleanRoomAssetNotebook.class) .add("etag", etag) .add("notebookContent", notebookContent) + .add("reviewState", reviewState) + .add("reviews", reviews) + .add("runnerCollaboratorAliases", runnerCollaboratorAliases) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java new file mode 100755 index 000000000..035bd6b57 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CleanRoomNotebookReview { + /** review comment */ + @JsonProperty("comment") + private String comment; + + /** timestamp of when the review was submitted */ + @JsonProperty("created_at_millis") + private Long createdAtMillis; + + /** review outcome */ + @JsonProperty("review_state") + private CleanRoomNotebookReviewNotebookReviewState reviewState; + + /** specified when the review was not explicitly made by a user */ + @JsonProperty("review_sub_reason") + private CleanRoomNotebookReviewNotebookReviewSubReason reviewSubReason; + + /** collaborator alias of the reviewer */ + @JsonProperty("reviewer_collaborator_alias") + private String reviewerCollaboratorAlias; + + public CleanRoomNotebookReview setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CleanRoomNotebookReview setCreatedAtMillis(Long createdAtMillis) { + this.createdAtMillis = createdAtMillis; + return this; + } + + public Long getCreatedAtMillis() { + return createdAtMillis; + } + + public CleanRoomNotebookReview setReviewState( + CleanRoomNotebookReviewNotebookReviewState reviewState) { + this.reviewState = reviewState; + return this; + } + + public CleanRoomNotebookReviewNotebookReviewState getReviewState() { + return reviewState; + } + + public CleanRoomNotebookReview setReviewSubReason( + CleanRoomNotebookReviewNotebookReviewSubReason reviewSubReason) { + this.reviewSubReason = reviewSubReason; + return this; + } + + public CleanRoomNotebookReviewNotebookReviewSubReason getReviewSubReason() { + return reviewSubReason; + } + + public CleanRoomNotebookReview setReviewerCollaboratorAlias(String reviewerCollaboratorAlias) { + this.reviewerCollaboratorAlias = 
reviewerCollaboratorAlias; + return this; + } + + public String getReviewerCollaboratorAlias() { + return reviewerCollaboratorAlias; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CleanRoomNotebookReview that = (CleanRoomNotebookReview) o; + return Objects.equals(comment, that.comment) + && Objects.equals(createdAtMillis, that.createdAtMillis) + && Objects.equals(reviewState, that.reviewState) + && Objects.equals(reviewSubReason, that.reviewSubReason) + && Objects.equals(reviewerCollaboratorAlias, that.reviewerCollaboratorAlias); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, createdAtMillis, reviewState, reviewSubReason, reviewerCollaboratorAlias); + } + + @Override + public String toString() { + return new ToStringer(CleanRoomNotebookReview.class) + .add("comment", comment) + .add("createdAtMillis", createdAtMillis) + .add("reviewState", reviewState) + .add("reviewSubReason", reviewSubReason) + .add("reviewerCollaboratorAlias", reviewerCollaboratorAlias) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewNotebookReviewState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewNotebookReviewState.java new file mode 100755 index 000000000..6366690c5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewNotebookReviewState.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum CleanRoomNotebookReviewNotebookReviewState { + APPROVED, + PENDING, + REJECTED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewNotebookReviewSubReason.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewNotebookReviewSubReason.java new file mode 100755 index 000000000..40ecde66c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewNotebookReviewSubReason.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.cleanrooms; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum CleanRoomNotebookReviewNotebookReviewSubReason { + AUTO_APPROVED, + BACKFILLED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java index 0097643eb..93cc03a88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java @@ -18,6 +18,10 @@ public class CleanRoomNotebookTaskRun { @JsonProperty("collaborator_job_run_info") private CollaboratorJobRunInfo collaboratorJobRunInfo; + /** Etag of the notebook executed in this task run, used to identify the notebook version. */ + @JsonProperty("notebook_etag") + private String notebookEtag; + /** State of the task run. 
*/ @JsonProperty("notebook_job_run_state") private com.databricks.sdk.service.jobs.CleanRoomTaskRunState notebookJobRunState; @@ -26,6 +30,10 @@ public class CleanRoomNotebookTaskRun { @JsonProperty("notebook_name") private String notebookName; + /** The timestamp of when the notebook was last updated. */ + @JsonProperty("notebook_updated_at") + private Long notebookUpdatedAt; + /** Expiration time of the output schema of the task run (if any), in epoch milliseconds. */ @JsonProperty("output_schema_expiration_time") private Long outputSchemaExpirationTime; @@ -52,6 +60,15 @@ public CollaboratorJobRunInfo getCollaboratorJobRunInfo() { return collaboratorJobRunInfo; } + public CleanRoomNotebookTaskRun setNotebookEtag(String notebookEtag) { + this.notebookEtag = notebookEtag; + return this; + } + + public String getNotebookEtag() { + return notebookEtag; + } + public CleanRoomNotebookTaskRun setNotebookJobRunState( com.databricks.sdk.service.jobs.CleanRoomTaskRunState notebookJobRunState) { this.notebookJobRunState = notebookJobRunState; @@ -71,6 +88,15 @@ public String getNotebookName() { return notebookName; } + public CleanRoomNotebookTaskRun setNotebookUpdatedAt(Long notebookUpdatedAt) { + this.notebookUpdatedAt = notebookUpdatedAt; + return this; + } + + public Long getNotebookUpdatedAt() { + return notebookUpdatedAt; + } + public CleanRoomNotebookTaskRun setOutputSchemaExpirationTime(Long outputSchemaExpirationTime) { this.outputSchemaExpirationTime = outputSchemaExpirationTime; return this; @@ -113,8 +139,10 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; CleanRoomNotebookTaskRun that = (CleanRoomNotebookTaskRun) o; return Objects.equals(collaboratorJobRunInfo, that.collaboratorJobRunInfo) + && Objects.equals(notebookEtag, that.notebookEtag) && Objects.equals(notebookJobRunState, that.notebookJobRunState) && Objects.equals(notebookName, that.notebookName) + && Objects.equals(notebookUpdatedAt, 
that.notebookUpdatedAt) && Objects.equals(outputSchemaExpirationTime, that.outputSchemaExpirationTime) && Objects.equals(outputSchemaName, that.outputSchemaName) && Objects.equals(runDuration, that.runDuration) @@ -125,8 +153,10 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( collaboratorJobRunInfo, + notebookEtag, notebookJobRunState, notebookName, + notebookUpdatedAt, outputSchemaExpirationTime, outputSchemaName, runDuration, @@ -137,8 +167,10 @@ public int hashCode() { public String toString() { return new ToStringer(CleanRoomNotebookTaskRun.class) .add("collaboratorJobRunInfo", collaboratorJobRunInfo) + .add("notebookEtag", notebookEtag) .add("notebookJobRunState", notebookJobRunState) .add("notebookName", notebookName) + .add("notebookUpdatedAt", notebookUpdatedAt) .add("outputSchemaExpirationTime", outputSchemaExpirationTime) .add("outputSchemaName", outputSchemaName) .add("runDuration", runDuration) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java index 38f4c5061..ff9668106 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java @@ -50,7 +50,8 @@ public class ClusterAttributes { /** * Cluster name requested by the user. This doesn't have to be unique. If not specified at - * creation, the cluster name will be an empty string. + * creation, the cluster name will be an empty string. For job clusters, the cluster name is + * automatically set based on the job and job run IDs. 
*/ @JsonProperty("cluster_name") private String clusterName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java index 3623d38c3..8b957e3dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java @@ -77,7 +77,8 @@ public class ClusterDetails { /** * Cluster name requested by the user. This doesn't have to be unique. If not specified at - * creation, the cluster name will be an empty string. + * creation, the cluster name will be an empty string. For job clusters, the cluster name is + * automatically set based on the job and job run IDs. */ @JsonProperty("cluster_name") private String clusterName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java index 31235a846..08cd8a715 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java @@ -64,7 +64,8 @@ public class ClusterSpec { /** * Cluster name requested by the user. This doesn't have to be unique. If not specified at - * creation, the cluster name will be an empty string. + * creation, the cluster name will be an empty string. For job clusters, the cluster name is + * automatically set based on the job and job run IDs. 
*/ @JsonProperty("cluster_name") private String clusterName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java index a8c6b9270..79853eda0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java @@ -67,7 +67,8 @@ public class CreateCluster { /** * Cluster name requested by the user. This doesn't have to be unique. If not specified at - * creation, the cluster name will be an empty string. + * creation, the cluster name will be an empty string. For job clusters, the cluster name is + * automatically set based on the job and job run IDs. */ @JsonProperty("cluster_name") private String clusterName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java index c10ea0a7a..81c1b7e85 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java @@ -64,7 +64,8 @@ public class EditCluster { /** * Cluster name requested by the user. This doesn't have to be unique. If not specified at - * creation, the cluster name will be an empty string. + * creation, the cluster name will be an empty string. For job clusters, the cluster name is + * automatically set based on the job and job run IDs. 
*/ @JsonProperty("cluster_name") private String clusterName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java index fb3c4aa48..d6dcdf38c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java @@ -10,9 +10,7 @@ /** * The environment entity used to preserve serverless environment side panel, jobs' environment for - * non-notebook task, and DLT's environment for classic and serverless pipelines. (Note: DLT uses a - * copied version of the Environment proto below, at - * //spark/pipelines/api/protos/copied/libraries-environments-copy.proto) In this minimal + * non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal * environment spec, only pip dependencies are supported. */ @Generated diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java index 2408fad89..89729b951 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java @@ -38,8 +38,10 @@ public void add(String instanceProfileArn) { /** * Register an instance profile. * - *

In the UI, you can select the instance profile when launching clusters. This API is only - * available to admin users. + *

Registers an instance profile in Databricks. In the UI, you can then give users the + * permission to use this instance profile when launching clusters. + * + *

This API is only available to admin users. */ public void add(AddInstanceProfile request) { impl.add(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesService.java index ac5a1b443..eb7e78c1d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesService.java @@ -20,8 +20,10 @@ public interface InstanceProfilesService { /** * Register an instance profile. * - *

In the UI, you can select the instance profile when launching clusters. This API is only - * available to admin users. + *

Registers an instance profile in Databricks. In the UI, you can then give users the + * permission to use this instance profile when launching clusters. + * + *

This API is only available to admin users. */ void add(AddInstanceProfile addInstanceProfile); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java index 273736519..151d44359 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java @@ -53,7 +53,8 @@ public class UpdateClusterResource { /** * Cluster name requested by the user. This doesn't have to be unique. If not specified at - * creation, the cluster name will be an empty string. + * creation, the cluster name will be an empty string. For job clusters, the cluster name is + * automatically set based on the job and job run IDs. */ @JsonProperty("cluster_name") private String clusterName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java index 613cdd616..6d95a309b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java @@ -10,7 +10,7 @@ * These APIs manage access rules on resources in an account. Currently, only grant rules are * supported. A grant rule specifies a role assigned to a set of principals. A list of rules * attached to a resource is called a rule set. A workspace must belong to an account for these APIs - * to work. + * to work */ @Generated public class AccountAccessControlProxyAPI { @@ -36,7 +36,7 @@ public GetAssignableRolesForResourceResponse getAssignableRolesForResource(Strin /** * Get assignable roles for a resource. * - *

Gets all the roles that can be granted on an account-level resource. A role is grantable if + *

Gets all the roles that can be granted on an account level resource. A role is grantable if * the rule set on the resource can contain an access rule of the role. */ public GetAssignableRolesForResourceResponse getAssignableRolesForResource( @@ -66,9 +66,8 @@ public RuleSetResponse updateRuleSet(String name, RuleSetUpdateRequest ruleSet) /** * Update a rule set. * - *

Replace the rules of a rule set. First, use a GET rule set request to read the current - * version of the rule set before modifying it. This pattern helps prevent conflicts between - * concurrent updates. + *

Replace the rules of a rule set. First, use get to read the current version of the rule set + * before modifying it. This pattern helps prevent conflicts between concurrent updates. */ public RuleSetResponse updateRuleSet(UpdateRuleSetRequest request) { return impl.updateRuleSet(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyService.java index 18282b4db..3de92d68e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyService.java @@ -7,7 +7,7 @@ * These APIs manage access rules on resources in an account. Currently, only grant rules are * supported. A grant rule specifies a role assigned to a set of principals. A list of rules * attached to a resource is called a rule set. A workspace must belong to an account for these APIs - * to work. + * to work * *

This is the high-level interface, that contains generated methods. * @@ -18,7 +18,7 @@ public interface AccountAccessControlProxyService { /** * Get assignable roles for a resource. * - *

Gets all the roles that can be granted on an account-level resource. A role is grantable if + *

Gets all the roles that can be granted on an account level resource. A role is grantable if * the rule set on the resource can contain an access rule of the role. */ GetAssignableRolesForResourceResponse getAssignableRolesForResource( @@ -36,9 +36,8 @@ GetAssignableRolesForResourceResponse getAssignableRolesForResource( /** * Update a rule set. * - *

Replace the rules of a rule set. First, use a GET rule set request to read the current - * version of the rule set before modifying it. This pattern helps prevent conflicts between - * concurrent updates. + *

Replace the rules of a rule set. First, use get to read the current version of the rule set + * before modifying it. This pattern helps prevent conflicts between concurrent updates. */ RuleSetResponse updateRuleSet(UpdateRuleSetRequest updateRuleSetRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceRequest.java index e8119fff8..2b034b2d3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAssignableRolesForResourceRequest.java @@ -11,7 +11,14 @@ /** Get assignable roles for a resource */ @Generated public class GetAssignableRolesForResourceRequest { - /** The resource name for which assignable roles will be listed. */ + /** + * The resource name for which assignable roles will be listed. + * + *

Examples | Summary :--- | :--- `resource=accounts/<ACCOUNT_ID>` | A resource name for the + * account. `resource=accounts/<ACCOUNT_ID>/groups/<GROUP_ID>` | A resource name for the group. + * `resource=accounts/<ACCOUNT_ID>/servicePrincipals/<SERVICE_PRINCIPAL_APPLICATION_ID>` | A resource name for the service + * principal. + */ + @JsonIgnore + @QueryParam("resource") + private String resource; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetRuleSetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetRuleSetRequest.java index eca981543..611758c5e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetRuleSetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetRuleSetRequest.java @@ -18,12 +18,24 @@ public class GetRuleSetRequest { * read -> modify -> write pattern to perform rule set updates in order to avoid race conditions * that is get an etag from a GET rule set request, and pass it with the PUT update request to * identify the rule set version you are updating. + * + *

Examples | Summary :--- | :--- `etag=` | An empty etag can only be used in GET to indicate + * no freshness requirements. `etag=RENUAAABhSweA4NvVmmUYdiU717H3Tgy0UJdor3gE4a+mq/oj9NjAf8ZsQ==` + * | An etag encoded a specific version of the rule set to get or to be updated. */ @JsonIgnore @QueryParam("etag") private String etag; - /** The ruleset name associated with the request. */ + /** + * The ruleset name associated with the request. + * + *

Examples | Summary :--- | :--- `name=accounts/<ACCOUNT_ID>/ruleSets/default` | A name for a + * rule set on the account. `name=accounts/<ACCOUNT_ID>/groups/<GROUP_ID>/ruleSets/default` | A + * name for a rule set on the group. + * `name=accounts/<ACCOUNT_ID>/servicePrincipals/<SERVICE_PRINCIPAL_APPLICATION_ID>/ruleSets/default` + * | A name for a rule set on the service principal. + */ + @JsonIgnore + @QueryParam("name") + private String name; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GrantRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GrantRule.java index bfc5e166e..fffa26c0c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GrantRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GrantRule.java @@ -10,7 +10,12 @@ @Generated public class GrantRule { - /** Principals this grant rule applies to. */ + /** + * Principals this grant rule applies to. A principal can be a user (for end users), a service + * principal (for applications and compute workloads), or an account group. Each principal has its + * own identifier format: * users/<USERNAME> * groups/<GROUP_NAME> * + * servicePrincipals/<SERVICE_PRINCIPAL_APPLICATION_ID> + */ + @JsonProperty("principals") + private Collection<String> principals; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RuleSetResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RuleSetResponse.java index 973d5e29e..137292095 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RuleSetResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RuleSetResponse.java @@ -10,7 +10,15 @@ @Generated public class RuleSetResponse { - /** Identifies the version of the rule set returned. */ + /** + * Identifies the version of the rule set returned. Etag used for versioning. The response is at + * least as fresh as the eTag provided. Etag is used for optimistic concurrency control as a way + * to help prevent simultaneous updates of a rule set from overwriting each other.
It is strongly + * suggested that systems make use of the etag in the read -> modify -> write pattern to perform + * rule set updates in order to avoid race conditions that is get an etag from a GET rule set + * request, and pass it with the PUT update request to identify the rule set version you are + * updating. + */ @JsonProperty("etag") private String etag; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RuleSetUpdateRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RuleSetUpdateRequest.java index 720b98ee6..cd70639aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RuleSetUpdateRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RuleSetUpdateRequest.java @@ -11,8 +11,13 @@ @Generated public class RuleSetUpdateRequest { /** - * The expected etag of the rule set to update. The update will fail if the value does not match - * the value that is stored in account access control service. + * Identifies the version of the rule set returned. Etag used for versioning. The response is at + * least as fresh as the eTag provided. Etag is used for optimistic concurrency control as a way + * to help prevent simultaneous updates of a rule set from overwriting each other. It is strongly + * suggested that systems make use of the etag in the read -> modify -> write pattern to perform + * rule set updates in order to avoid race conditions that is get an etag from a GET rule set + * request, and pass it with the PUT update request to identify the rule set version you are + * updating. 
*/ @JsonProperty("etag") private String etag; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java index 188e17126..dc3dcccea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java @@ -15,10 +15,8 @@ public class JobEnvironment { /** * The environment entity used to preserve serverless environment side panel, jobs' environment - * for non-notebook task, and DLT's environment for classic and serverless pipelines. (Note: DLT - * uses a copied version of the Environment proto below, at - * //spark/pipelines/api/protos/copied/libraries-environments-copy.proto) In this minimal - * environment spec, only pip dependencies are supported. + * for non-notebook task, and DLT's environment for classic and serverless pipelines. In this + * minimal environment spec, only pip dependencies are supported. */ @JsonProperty("spec") private com.databricks.sdk.service.compute.Environment spec; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequest.java new file mode 100755 index 000000000..63308f48e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequest.java @@ -0,0 +1,120 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class CreateLoggedModelRequest { + /** The ID of the experiment that owns the model. 
*/ + @JsonProperty("experiment_id") + private String experimentId; + + /** The type of the model, such as ``"Agent"``, ``"Classifier"``, ``"LLM"``. */ + @JsonProperty("model_type") + private String modelType; + + /** The name of the model (optional). If not specified one will be generated. */ + @JsonProperty("name") + private String name; + + /** Parameters attached to the model. */ + @JsonProperty("params") + private Collection params; + + /** The ID of the run that created the model. */ + @JsonProperty("source_run_id") + private String sourceRunId; + + /** Tags attached to the model. */ + @JsonProperty("tags") + private Collection tags; + + public CreateLoggedModelRequest setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public CreateLoggedModelRequest setModelType(String modelType) { + this.modelType = modelType; + return this; + } + + public String getModelType() { + return modelType; + } + + public CreateLoggedModelRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateLoggedModelRequest setParams(Collection params) { + this.params = params; + return this; + } + + public Collection getParams() { + return params; + } + + public CreateLoggedModelRequest setSourceRunId(String sourceRunId) { + this.sourceRunId = sourceRunId; + return this; + } + + public String getSourceRunId() { + return sourceRunId; + } + + public CreateLoggedModelRequest setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateLoggedModelRequest that = (CreateLoggedModelRequest) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(modelType, that.modelType) + && 
Objects.equals(name, that.name) + && Objects.equals(params, that.params) + && Objects.equals(sourceRunId, that.sourceRunId) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, modelType, name, params, sourceRunId, tags); + } + + @Override + public String toString() { + return new ToStringer(CreateLoggedModelRequest.class) + .add("experimentId", experimentId) + .add("modelType", modelType) + .add("name", name) + .add("params", params) + .add("sourceRunId", sourceRunId) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponse.java new file mode 100755 index 000000000..4e74cc259 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateLoggedModelResponse { + /** The newly created logged model. 
*/ + @JsonProperty("model") + private LoggedModel model; + + public CreateLoggedModelResponse setModel(LoggedModel model) { + this.model = model; + return this; + } + + public LoggedModel getModel() { + return model; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateLoggedModelResponse that = (CreateLoggedModelResponse) o; + return Objects.equals(model, that.model); + } + + @Override + public int hashCode() { + return Objects.hash(model); + } + + @Override + public String toString() { + return new ToStringer(CreateLoggedModelResponse.class).add("model", model).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java new file mode 100755 index 000000000..011c95025 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a logged model */ +@Generated +public class DeleteLoggedModelRequest { + /** The ID of the logged model to delete. 
*/ + @JsonIgnore private String modelId; + + public DeleteLoggedModelRequest setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteLoggedModelRequest that = (DeleteLoggedModelRequest) o; + return Objects.equals(modelId, that.modelId); + } + + @Override + public int hashCode() { + return Objects.hash(modelId); + } + + @Override + public String toString() { + return new ToStringer(DeleteLoggedModelRequest.class).add("modelId", modelId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java new file mode 100755 index 000000000..4032513b7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class DeleteLoggedModelResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteLoggedModelResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java new file mode 100755 index 000000000..e5c0983e6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a tag on a logged model */ +@Generated +public class DeleteLoggedModelTagRequest { + /** The ID of the logged model to delete the tag from. */ + @JsonIgnore private String modelId; + + /** The tag key. 
*/ + @JsonIgnore private String tagKey; + + public DeleteLoggedModelTagRequest setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public DeleteLoggedModelTagRequest setTagKey(String tagKey) { + this.tagKey = tagKey; + return this; + } + + public String getTagKey() { + return tagKey; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteLoggedModelTagRequest that = (DeleteLoggedModelTagRequest) o; + return Objects.equals(modelId, that.modelId) && Objects.equals(tagKey, that.tagKey); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, tagKey); + } + + @Override + public String toString() { + return new ToStringer(DeleteLoggedModelTagRequest.class) + .add("modelId", modelId) + .add("tagKey", tagKey) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java new file mode 100755 index 000000000..5e9f53856 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class DeleteLoggedModelTagResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteLoggedModelTagResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index 053dd8525..bcc631e68 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -49,6 +49,15 @@ public CreateExperimentResponse createExperiment(CreateExperiment request) { return impl.createExperiment(request); } + public CreateLoggedModelResponse createLoggedModel(String experimentId) { + return createLoggedModel(new CreateLoggedModelRequest().setExperimentId(experimentId)); + } + + /** Create a logged model. */ + public CreateLoggedModelResponse createLoggedModel(CreateLoggedModelRequest request) { + return impl.createLoggedModel(request); + } + /** * Create a run. * @@ -74,6 +83,24 @@ public void deleteExperiment(DeleteExperiment request) { impl.deleteExperiment(request); } + public void deleteLoggedModel(String modelId) { + deleteLoggedModel(new DeleteLoggedModelRequest().setModelId(modelId)); + } + + /** Delete a logged model. 
*/ + public void deleteLoggedModel(DeleteLoggedModelRequest request) { + impl.deleteLoggedModel(request); + } + + public void deleteLoggedModelTag(String modelId, String tagKey) { + deleteLoggedModelTag(new DeleteLoggedModelTagRequest().setModelId(modelId).setTagKey(tagKey)); + } + + /** Delete a tag on a logged model. */ + public void deleteLoggedModelTag(DeleteLoggedModelTagRequest request) { + impl.deleteLoggedModelTag(request); + } + public void deleteRun(String runId) { deleteRun(new DeleteRun().setRunId(runId)); } @@ -117,6 +144,16 @@ public void deleteTag(DeleteTag request) { impl.deleteTag(request); } + public FinalizeLoggedModelResponse finalizeLoggedModel(String modelId, LoggedModelStatus status) { + return finalizeLoggedModel( + new FinalizeLoggedModelRequest().setModelId(modelId).setStatus(status)); + } + + /** Finalize a logged model. */ + public FinalizeLoggedModelResponse finalizeLoggedModel(FinalizeLoggedModelRequest request) { + return impl.finalizeLoggedModel(request); + } + public GetExperimentByNameResponse getByName(String experimentName) { return getByName(new GetByNameRequest().setExperimentName(experimentName)); } @@ -196,6 +233,15 @@ public Iterable getHistory(GetHistoryRequest request) { }); } + public GetLoggedModelResponse getLoggedModel(String modelId) { + return getLoggedModel(new GetLoggedModelRequest().setModelId(modelId)); + } + + /** Get a logged model. 
*/ + public GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest request) { + return impl.getLoggedModel(request); + } + public GetExperimentPermissionLevelsResponse getPermissionLevels(String experimentId) { return getPermissionLevels( new GetExperimentPermissionLevelsRequest().setExperimentId(experimentId)); @@ -283,6 +329,21 @@ public Iterable listExperiments(ListExperimentsRequest request) { }); } + public ListLoggedModelArtifactsResponse listLoggedModelArtifacts(String modelId) { + return listLoggedModelArtifacts(new ListLoggedModelArtifactsRequest().setModelId(modelId)); + } + + /** + * List artifacts for a logged model. + * + *

List artifacts for a logged model. Takes an optional ``artifact_directory_path`` prefix + * which if specified, the response contains only artifacts with the specified prefix. + */ + public ListLoggedModelArtifactsResponse listLoggedModelArtifacts( + ListLoggedModelArtifactsRequest request) { + return impl.listLoggedModelArtifacts(request); + } + /** * Log a batch of metrics/params/tags for a run. * @@ -349,6 +410,22 @@ public void logInputs(LogInputs request) { impl.logInputs(request); } + public void logLoggedModelParams(String modelId) { + logLoggedModelParams(new LogLoggedModelParamsRequest().setModelId(modelId)); + } + + /** + * Log params for a logged model. + * + *

Logs params for a logged model. A param is a key-value pair (string key, string value). + * Examples include hyperparameters used for ML model training. A param can be logged only once + * for a logged model, and attempting to overwrite an existing param with a different value will + * result in an error + */ + public void logLoggedModelParams(LogLoggedModelParamsRequest request) { + impl.logLoggedModelParams(request); + } + public void logMetric(String key, double value, long timestamp) { logMetric(new LogMetric().setKey(key).setValue(value).setTimestamp(timestamp)); } @@ -374,6 +451,22 @@ public void logModel(LogModel request) { impl.logModel(request); } + public void logOutputs(String runId) { + logOutputs(new LogOutputsRequest().setRunId(runId)); + } + + /** + * Log outputs from a run. + * + *

**NOTE**: Experimental: This API may change or be removed in a future release without + * warning. + * + *

Logs outputs, such as models, from an MLflow Run. + */ + public void logOutputs(LogOutputsRequest request) { + impl.logOutputs(request); + } + public void logParam(String key, String value) { logParam(new LogParam().setKey(key).setValue(value)); } @@ -457,6 +550,15 @@ public Iterable searchExperiments(SearchExperiments request) { }); } + /** + * Search logged models. + * + *

Search for Logged Models that satisfy specified search criteria. + */ + public SearchLoggedModelsResponse searchLoggedModels(SearchLoggedModelsRequest request) { + return impl.searchLoggedModels(request); + } + /** * Search for runs. * @@ -492,6 +594,15 @@ public void setExperimentTag(SetExperimentTag request) { impl.setExperimentTag(request); } + public void setLoggedModelTags(String modelId) { + setLoggedModelTags(new SetLoggedModelTagsRequest().setModelId(modelId)); + } + + /** Set a tag for a logged model. */ + public void setLoggedModelTags(SetLoggedModelTagsRequest request) { + impl.setLoggedModelTags(request); + } + public ExperimentPermissions setPermissions(String experimentId) { return setPermissions(new ExperimentPermissionsRequest().setExperimentId(experimentId)); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java index 0e3b7116b..c228b7e72 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java @@ -30,6 +30,20 @@ public CreateExperimentResponse createExperiment(CreateExperiment request) { } } + @Override + public CreateLoggedModelResponse createLoggedModel(CreateLoggedModelRequest request) { + String path = "/api/2.0/mlflow/logged-models"; + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateLoggedModelResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public CreateRunResponse createRun(CreateRun request) { String path = "/api/2.0/mlflow/runs/create"; @@ -58,6 +72,34 @@ public void 
deleteExperiment(DeleteExperiment request) { } } + @Override + public void deleteLoggedModel(DeleteLoggedModelRequest request) { + String path = String.format("/api/2.0/mlflow/logged-models/%s", request.getModelId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteLoggedModelResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteLoggedModelTag(DeleteLoggedModelTagRequest request) { + String path = + String.format( + "/api/2.0/mlflow/logged-models/%s/tags/%s", request.getModelId(), request.getTagKey()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteLoggedModelTagResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public void deleteRun(DeleteRun request) { String path = "/api/2.0/mlflow/runs/delete"; @@ -100,6 +142,20 @@ public void deleteTag(DeleteTag request) { } } + @Override + public FinalizeLoggedModelResponse finalizeLoggedModel(FinalizeLoggedModelRequest request) { + String path = String.format("/api/2.0/mlflow/logged-models/%s", request.getModelId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, FinalizeLoggedModelResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GetExperimentByNameResponse getByName(GetByNameRequest request) { String path = "/api/2.0/mlflow/experiments/get-by-name"; @@ -171,6 +227,19 @@ public GetMetricHistoryResponse 
getHistory(GetHistoryRequest request) { } } + @Override + public GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest request) { + String path = String.format("/api/2.0/mlflow/logged-models/%s", request.getModelId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetLoggedModelResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GetExperimentPermissionLevelsResponse getPermissionLevels( GetExperimentPermissionLevelsRequest request) { @@ -239,6 +308,22 @@ public ListExperimentsResponse listExperiments(ListExperimentsRequest request) { } } + @Override + public ListLoggedModelArtifactsResponse listLoggedModelArtifacts( + ListLoggedModelArtifactsRequest request) { + String path = + String.format( + "/api/2.0/mlflow/logged-models/%s/artifacts/directories", request.getModelId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListLoggedModelArtifactsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public void logBatch(LogBatch request) { String path = "/api/2.0/mlflow/runs/log-batch"; @@ -267,6 +352,20 @@ public void logInputs(LogInputs request) { } } + @Override + public void logLoggedModelParams(LogLoggedModelParamsRequest request) { + String path = String.format("/api/2.0/mlflow/logged-models/%s/params", request.getModelId()); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, LogLoggedModelParamsRequestResponse.class); + } catch (IOException e) { + throw new 
DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public void logMetric(LogMetric request) { String path = "/api/2.0/mlflow/runs/log-metric"; @@ -295,6 +394,20 @@ public void logModel(LogModel request) { } } + @Override + public void logOutputs(LogOutputsRequest request) { + String path = "/api/2.0/mlflow/runs/outputs"; + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, LogOutputsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public void logParam(LogParam request) { String path = "/api/2.0/mlflow/runs/log-parameter"; @@ -365,6 +478,20 @@ public SearchExperimentsResponse searchExperiments(SearchExperiments request) { } } + @Override + public SearchLoggedModelsResponse searchLoggedModels(SearchLoggedModelsRequest request) { + String path = "/api/2.0/mlflow/logged-models/search"; + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, SearchLoggedModelsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public SearchRunsResponse searchRuns(SearchRuns request) { String path = "/api/2.0/mlflow/runs/search"; @@ -393,6 +520,20 @@ public void setExperimentTag(SetExperimentTag request) { } } + @Override + public void setLoggedModelTags(SetLoggedModelTagsRequest request) { + String path = String.format("/api/2.0/mlflow/logged-models/%s/tags", request.getModelId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", 
"application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, SetLoggedModelTagsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public ExperimentPermissions setPermissions(ExperimentPermissionsRequest request) { String path = String.format("/api/2.0/permissions/experiments/%s", request.getExperimentId()); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index ec7ca96ea..abafed87e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -29,6 +29,9 @@ public interface ExperimentsService { */ CreateExperimentResponse createExperiment(CreateExperiment createExperiment); + /** Create a logged model. */ + CreateLoggedModelResponse createLoggedModel(CreateLoggedModelRequest createLoggedModelRequest); + /** * Create a run. * @@ -46,6 +49,12 @@ public interface ExperimentsService { */ void deleteExperiment(DeleteExperiment deleteExperiment); + /** Delete a logged model. */ + void deleteLoggedModel(DeleteLoggedModelRequest deleteLoggedModelRequest); + + /** Delete a tag on a logged model. */ + void deleteLoggedModelTag(DeleteLoggedModelTagRequest deleteLoggedModelTagRequest); + /** * Delete a run. * @@ -70,6 +79,10 @@ public interface ExperimentsService { */ void deleteTag(DeleteTag deleteTag); + /** Finalize a logged model. */ + FinalizeLoggedModelResponse finalizeLoggedModel( + FinalizeLoggedModelRequest finalizeLoggedModelRequest); + /** * Get an experiment by name. * @@ -105,6 +118,9 @@ GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( */ GetMetricHistoryResponse getHistory(GetHistoryRequest getHistoryRequest); + /** Get a logged model. 
*/ + GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest getLoggedModelRequest); + /** * Get experiment permission levels. * @@ -150,6 +166,15 @@ ExperimentPermissions getPermissions( */ ListExperimentsResponse listExperiments(ListExperimentsRequest listExperimentsRequest); + /** + * List artifacts for a logged model. + * + *

List artifacts for a logged model. Takes an optional ``artifact_directory_path`` prefix + * which if specified, the response contains only artifacts with the specified prefix. + */ + ListLoggedModelArtifactsResponse listLoggedModelArtifacts( + ListLoggedModelArtifactsRequest listLoggedModelArtifactsRequest); + /** * Log a batch of metrics/params/tags for a run. * @@ -208,6 +233,16 @@ ExperimentPermissions getPermissions( */ void logInputs(LogInputs logInputs); + /** + * Log params for a logged model. + * + *

Logs params for a logged model. A param is a key-value pair (string key, string value). + * Examples include hyperparameters used for ML model training. A param can be logged only once + * for a logged model, and attempting to overwrite an existing param with a different value will + * result in an error + */ + void logLoggedModelParams(LogLoggedModelParamsRequest logLoggedModelParamsRequest); + /** * Log a metric for a run. * @@ -225,6 +260,16 @@ ExperimentPermissions getPermissions( */ void logModel(LogModel logModel); + /** + * Log outputs from a run. + * + *

**NOTE**: Experimental: This API may change or be removed in a future release without + * warning. + * + *

Logs outputs, such as models, from an MLflow Run. + */ + void logOutputs(LogOutputsRequest logOutputsRequest); + /** * Log a param for a run. * @@ -271,6 +316,14 @@ ExperimentPermissions getPermissions( */ SearchExperimentsResponse searchExperiments(SearchExperiments searchExperiments); + /** + * Search logged models. + * + *

Search for Logged Models that satisfy specified search criteria. + */ + SearchLoggedModelsResponse searchLoggedModels( + SearchLoggedModelsRequest searchLoggedModelsRequest); + /** * Search for runs. * @@ -287,6 +340,9 @@ ExperimentPermissions getPermissions( */ void setExperimentTag(SetExperimentTag setExperimentTag); + /** Set a tag for a logged model. */ + void setLoggedModelTags(SetLoggedModelTagsRequest setLoggedModelTagsRequest); + /** * Set experiment permissions. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java new file mode 100755 index 000000000..e57f1bbae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class FinalizeLoggedModelRequest { + /** The ID of the logged model to finalize. */ + @JsonIgnore private String modelId; + + /** + * Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that + * something went wrong when logging the model weights / agent code). 
+ */ + @JsonProperty("status") + private LoggedModelStatus status; + + public FinalizeLoggedModelRequest setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public FinalizeLoggedModelRequest setStatus(LoggedModelStatus status) { + this.status = status; + return this; + } + + public LoggedModelStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FinalizeLoggedModelRequest that = (FinalizeLoggedModelRequest) o; + return Objects.equals(modelId, that.modelId) && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, status); + } + + @Override + public String toString() { + return new ToStringer(FinalizeLoggedModelRequest.class) + .add("modelId", modelId) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponse.java new file mode 100755 index 000000000..295478ce5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class FinalizeLoggedModelResponse { + /** The updated logged model. 
*/ + @JsonProperty("model") + private LoggedModel model; + + public FinalizeLoggedModelResponse setModel(LoggedModel model) { + this.model = model; + return this; + } + + public LoggedModel getModel() { + return model; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FinalizeLoggedModelResponse that = (FinalizeLoggedModelResponse) o; + return Objects.equals(model, that.model); + } + + @Override + public int hashCode() { + return Objects.hash(model); + } + + @Override + public String toString() { + return new ToStringer(FinalizeLoggedModelResponse.class).add("model", model).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java new file mode 100755 index 000000000..e2f070898 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a logged model */ +@Generated +public class GetLoggedModelRequest { + /** The ID of the logged model to retrieve. 
*/ + @JsonIgnore private String modelId; + + public GetLoggedModelRequest setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLoggedModelRequest that = (GetLoggedModelRequest) o; + return Objects.equals(modelId, that.modelId); + } + + @Override + public int hashCode() { + return Objects.hash(modelId); + } + + @Override + public String toString() { + return new ToStringer(GetLoggedModelRequest.class).add("modelId", modelId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java new file mode 100755 index 000000000..afb45504b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetLoggedModelResponse { + /** The retrieved logged model. 
*/ + @JsonProperty("model") + private LoggedModel model; + + public GetLoggedModelResponse setModel(LoggedModel model) { + this.model = model; + return this; + } + + public LoggedModel getModel() { + return model; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLoggedModelResponse that = (GetLoggedModelResponse) o; + return Objects.equals(model, that.model); + } + + @Override + public int hashCode() { + return Objects.hash(model); + } + + @Override + public String toString() { + return new ToStringer(GetLoggedModelResponse.class).add("model", model).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java new file mode 100755 index 000000000..e94842c95 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java @@ -0,0 +1,83 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List artifacts for a logged model */ +@Generated +public class ListLoggedModelArtifactsRequest { + /** Filter artifacts matching this path (a relative path from the root artifact directory). */ + @JsonIgnore + @QueryParam("artifact_directory_path") + private String artifactDirectoryPath; + + /** The ID of the logged model for which to list the artifacts. */ + @JsonIgnore private String modelId; + + /** + * Token indicating the page of artifact results to fetch. `page_token` is not supported when + * listing artifacts in UC Volumes. 
A maximum of 1000 artifacts will be retrieved for UC Volumes. + * Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, + * which supports pagination. See [List directory contents | Files + * API](/api/workspace/files/listdirectorycontents). + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListLoggedModelArtifactsRequest setArtifactDirectoryPath(String artifactDirectoryPath) { + this.artifactDirectoryPath = artifactDirectoryPath; + return this; + } + + public String getArtifactDirectoryPath() { + return artifactDirectoryPath; + } + + public ListLoggedModelArtifactsRequest setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public ListLoggedModelArtifactsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListLoggedModelArtifactsRequest that = (ListLoggedModelArtifactsRequest) o; + return Objects.equals(artifactDirectoryPath, that.artifactDirectoryPath) + && Objects.equals(modelId, that.modelId) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(artifactDirectoryPath, modelId, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListLoggedModelArtifactsRequest.class) + .add("artifactDirectoryPath", artifactDirectoryPath) + .add("modelId", modelId) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java new file mode 100755 index 000000000..35e2dbe82 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListLoggedModelArtifactsResponse { + /** File location and metadata for artifacts. */ + @JsonProperty("files") + private Collection files; + + /** Token that can be used to retrieve the next page of artifact results */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** Root artifact directory for the logged model. */ + @JsonProperty("root_uri") + private String rootUri; + + public ListLoggedModelArtifactsResponse setFiles(Collection files) { + this.files = files; + return this; + } + + public Collection getFiles() { + return files; + } + + public ListLoggedModelArtifactsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListLoggedModelArtifactsResponse setRootUri(String rootUri) { + this.rootUri = rootUri; + return this; + } + + public String getRootUri() { + return rootUri; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListLoggedModelArtifactsResponse that = (ListLoggedModelArtifactsResponse) o; + return Objects.equals(files, that.files) + && Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(rootUri, that.rootUri); + } + + @Override + public int hashCode() { + return Objects.hash(files, nextPageToken, rootUri); + } + + @Override + public String toString() { + return new ToStringer(ListLoggedModelArtifactsResponse.class) + 
.add("files", files) + .add("nextPageToken", nextPageToken) + .add("rootUri", rootUri) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequest.java new file mode 100755 index 000000000..39472485d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequest.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class LogLoggedModelParamsRequest { + /** The ID of the logged model to log params for. */ + @JsonIgnore private String modelId; + + /** Parameters to attach to the model. 
*/ + @JsonProperty("params") + private Collection params; + + public LogLoggedModelParamsRequest setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public LogLoggedModelParamsRequest setParams(Collection params) { + this.params = params; + return this; + } + + public Collection getParams() { + return params; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogLoggedModelParamsRequest that = (LogLoggedModelParamsRequest) o; + return Objects.equals(modelId, that.modelId) && Objects.equals(params, that.params); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, params); + } + + @Override + public String toString() { + return new ToStringer(LogLoggedModelParamsRequest.class) + .add("modelId", modelId) + .add("params", params) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java new file mode 100755 index 000000000..770602c54 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class LogLoggedModelParamsRequestResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(LogLoggedModelParamsRequestResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequest.java new file mode 100755 index 000000000..00fd2638a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequest.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class LogOutputsRequest { + /** The model outputs from the Run. */ + @JsonProperty("models") + private Collection models; + + /** The ID of the Run from which to log outputs. 
*/ + @JsonProperty("run_id") + private String runId; + + public LogOutputsRequest setModels(Collection models) { + this.models = models; + return this; + } + + public Collection getModels() { + return models; + } + + public LogOutputsRequest setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LogOutputsRequest that = (LogOutputsRequest) o; + return Objects.equals(models, that.models) && Objects.equals(runId, that.runId); + } + + @Override + public int hashCode() { + return Objects.hash(models, runId); + } + + @Override + public String toString() { + return new ToStringer(LogOutputsRequest.class) + .add("models", models) + .add("runId", runId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java new file mode 100755 index 000000000..8e3c962b3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class LogOutputsResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(LogOutputsResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModel.java new file mode 100755 index 000000000..5a57b30c2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModel.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A logged model message includes logged model attributes, tags, registration info, params, and + * linked run metrics. + */ +@Generated +public class LoggedModel { + /** The params and metrics attached to the logged model. */ + @JsonProperty("data") + private LoggedModelData data; + + /** The logged model attributes such as model ID, status, tags, etc. 
*/ + @JsonProperty("info") + private LoggedModelInfo info; + + public LoggedModel setData(LoggedModelData data) { + this.data = data; + return this; + } + + public LoggedModelData getData() { + return data; + } + + public LoggedModel setInfo(LoggedModelInfo info) { + this.info = info; + return this; + } + + public LoggedModelInfo getInfo() { + return info; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LoggedModel that = (LoggedModel) o; + return Objects.equals(data, that.data) && Objects.equals(info, that.info); + } + + @Override + public int hashCode() { + return Objects.hash(data, info); + } + + @Override + public String toString() { + return new ToStringer(LoggedModel.class).add("data", data).add("info", info).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelData.java new file mode 100755 index 000000000..bdd0efc9c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelData.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** A LoggedModelData message includes logged model params and linked metrics. */ +@Generated +public class LoggedModelData { + /** Performance metrics linked to the model. */ + @JsonProperty("metrics") + private Collection metrics; + + /** Immutable string key-value pairs of the model. 
*/ + @JsonProperty("params") + private Collection params; + + public LoggedModelData setMetrics(Collection metrics) { + this.metrics = metrics; + return this; + } + + public Collection getMetrics() { + return metrics; + } + + public LoggedModelData setParams(Collection params) { + this.params = params; + return this; + } + + public Collection getParams() { + return params; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LoggedModelData that = (LoggedModelData) o; + return Objects.equals(metrics, that.metrics) && Objects.equals(params, that.params); + } + + @Override + public int hashCode() { + return Objects.hash(metrics, params); + } + + @Override + public String toString() { + return new ToStringer(LoggedModelData.class) + .add("metrics", metrics) + .add("params", params) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo.java new file mode 100755 index 000000000..e6f626243 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo.java @@ -0,0 +1,223 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** A LoggedModelInfo includes logged model attributes, tags, and registration info. */ +@Generated +public class LoggedModelInfo { + /** The URI of the directory where model artifacts are stored. */ + @JsonProperty("artifact_uri") + private String artifactUri; + + /** The timestamp when the model was created in milliseconds since the UNIX epoch. 
*/ + @JsonProperty("creation_timestamp_ms") + private Long creationTimestampMs; + + /** The ID of the user or principal that created the model. */ + @JsonProperty("creator_id") + private Long creatorId; + + /** The ID of the experiment that owns the model. */ + @JsonProperty("experiment_id") + private String experimentId; + + /** The timestamp when the model was last updated in milliseconds since the UNIX epoch. */ + @JsonProperty("last_updated_timestamp_ms") + private Long lastUpdatedTimestampMs; + + /** The unique identifier for the logged model. */ + @JsonProperty("model_id") + private String modelId; + + /** The type of model, such as ``"Agent"``, ``"Classifier"``, ``"LLM"``. */ + @JsonProperty("model_type") + private String modelType; + + /** The name of the model. */ + @JsonProperty("name") + private String name; + + /** The ID of the run that created the model. */ + @JsonProperty("source_run_id") + private String sourceRunId; + + /** The status of whether or not the model is ready for use. */ + @JsonProperty("status") + private LoggedModelStatus status; + + /** Details on the current model status. */ + @JsonProperty("status_message") + private String statusMessage; + + /** Mutable string key-value pairs set on the model. 
*/ + @JsonProperty("tags") + private Collection tags; + + public LoggedModelInfo setArtifactUri(String artifactUri) { + this.artifactUri = artifactUri; + return this; + } + + public String getArtifactUri() { + return artifactUri; + } + + public LoggedModelInfo setCreationTimestampMs(Long creationTimestampMs) { + this.creationTimestampMs = creationTimestampMs; + return this; + } + + public Long getCreationTimestampMs() { + return creationTimestampMs; + } + + public LoggedModelInfo setCreatorId(Long creatorId) { + this.creatorId = creatorId; + return this; + } + + public Long getCreatorId() { + return creatorId; + } + + public LoggedModelInfo setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public LoggedModelInfo setLastUpdatedTimestampMs(Long lastUpdatedTimestampMs) { + this.lastUpdatedTimestampMs = lastUpdatedTimestampMs; + return this; + } + + public Long getLastUpdatedTimestampMs() { + return lastUpdatedTimestampMs; + } + + public LoggedModelInfo setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public LoggedModelInfo setModelType(String modelType) { + this.modelType = modelType; + return this; + } + + public String getModelType() { + return modelType; + } + + public LoggedModelInfo setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public LoggedModelInfo setSourceRunId(String sourceRunId) { + this.sourceRunId = sourceRunId; + return this; + } + + public String getSourceRunId() { + return sourceRunId; + } + + public LoggedModelInfo setStatus(LoggedModelStatus status) { + this.status = status; + return this; + } + + public LoggedModelStatus getStatus() { + return status; + } + + public LoggedModelInfo setStatusMessage(String statusMessage) { + this.statusMessage = statusMessage; + return this; + } + + public 
String getStatusMessage() { + return statusMessage; + } + + public LoggedModelInfo setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LoggedModelInfo that = (LoggedModelInfo) o; + return Objects.equals(artifactUri, that.artifactUri) + && Objects.equals(creationTimestampMs, that.creationTimestampMs) + && Objects.equals(creatorId, that.creatorId) + && Objects.equals(experimentId, that.experimentId) + && Objects.equals(lastUpdatedTimestampMs, that.lastUpdatedTimestampMs) + && Objects.equals(modelId, that.modelId) + && Objects.equals(modelType, that.modelType) + && Objects.equals(name, that.name) + && Objects.equals(sourceRunId, that.sourceRunId) + && Objects.equals(status, that.status) + && Objects.equals(statusMessage, that.statusMessage) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash( + artifactUri, + creationTimestampMs, + creatorId, + experimentId, + lastUpdatedTimestampMs, + modelId, + modelType, + name, + sourceRunId, + status, + statusMessage, + tags); + } + + @Override + public String toString() { + return new ToStringer(LoggedModelInfo.class) + .add("artifactUri", artifactUri) + .add("creationTimestampMs", creationTimestampMs) + .add("creatorId", creatorId) + .add("experimentId", experimentId) + .add("lastUpdatedTimestampMs", lastUpdatedTimestampMs) + .add("modelId", modelId) + .add("modelType", modelType) + .add("name", name) + .add("sourceRunId", sourceRunId) + .add("status", status) + .add("statusMessage", statusMessage) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameter.java new file mode 100755 index 
000000000..1d3ccc2b8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameter.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Parameter associated with a LoggedModel. */ +@Generated +public class LoggedModelParameter { + /** The key identifying this param. */ + @JsonProperty("key") + private String key; + + /** The value of this param. */ + @JsonProperty("value") + private String value; + + public LoggedModelParameter setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public LoggedModelParameter setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LoggedModelParameter that = (LoggedModelParameter) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(LoggedModelParameter.class) + .add("key", key) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelStatus.java new file mode 100755 index 000000000..be33d7741 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelStatus.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; + +/** A LoggedModelStatus enum value represents the status of a logged model. */ +@Generated +public enum LoggedModelStatus { + LOGGED_MODEL_PENDING, + LOGGED_MODEL_READY, + LOGGED_MODEL_UPLOAD_FAILED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTag.java new file mode 100755 index 000000000..bd80c2d14 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTag.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Tag for a LoggedModel. */ +@Generated +public class LoggedModelTag { + /** The tag key. */ + @JsonProperty("key") + private String key; + + /** The tag value. 
*/ + @JsonProperty("value") + private String value; + + public LoggedModelTag setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public LoggedModelTag setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + LoggedModelTag that = (LoggedModelTag) o; + return Objects.equals(key, that.key) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public String toString() { + return new ToStringer(LoggedModelTag.class).add("key", key).add("value", value).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelOutput.java new file mode 100755 index 000000000..6581459ee --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelOutput.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents a LoggedModel output of a Run. */ +@Generated +public class ModelOutput { + /** The unique identifier of the model. */ + @JsonProperty("model_id") + private String modelId; + + /** The step at which the model was produced. 
*/ + @JsonProperty("step") + private Long step; + + public ModelOutput setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public ModelOutput setStep(Long step) { + this.step = step; + return this; + } + + public Long getStep() { + return step; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelOutput that = (ModelOutput) o; + return Objects.equals(modelId, that.modelId) && Objects.equals(step, that.step); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, step); + } + + @Override + public String toString() { + return new ToStringer(ModelOutput.class).add("modelId", modelId).add("step", step).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDataset.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDataset.java new file mode 100755 index 000000000..495ca4cd7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDataset.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class SearchLoggedModelsDataset { + /** The digest of the dataset. */ + @JsonProperty("dataset_digest") + private String datasetDigest; + + /** The name of the dataset. 
*/ + @JsonProperty("dataset_name") + private String datasetName; + + public SearchLoggedModelsDataset setDatasetDigest(String datasetDigest) { + this.datasetDigest = datasetDigest; + return this; + } + + public String getDatasetDigest() { + return datasetDigest; + } + + public SearchLoggedModelsDataset setDatasetName(String datasetName) { + this.datasetName = datasetName; + return this; + } + + public String getDatasetName() { + return datasetName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchLoggedModelsDataset that = (SearchLoggedModelsDataset) o; + return Objects.equals(datasetDigest, that.datasetDigest) + && Objects.equals(datasetName, that.datasetName); + } + + @Override + public int hashCode() { + return Objects.hash(datasetDigest, datasetName); + } + + @Override + public String toString() { + return new ToStringer(SearchLoggedModelsDataset.class) + .add("datasetDigest", datasetDigest) + .add("datasetName", datasetName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsOrderBy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsOrderBy.java new file mode 100755 index 000000000..72e69fe1d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsOrderBy.java @@ -0,0 +1,97 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class SearchLoggedModelsOrderBy { + /** Whether the search results order is ascending or not. 
*/ + @JsonProperty("ascending") + private Boolean ascending; + + /** + * If ``field_name`` refers to a metric, this field specifies the digest of the dataset associated + * with the metric. Only metrics associated with the specified dataset name and digest will be + * considered for ordering. This field may only be set if ``dataset_name`` is also set. + */ + @JsonProperty("dataset_digest") + private String datasetDigest; + + /** + * If ``field_name`` refers to a metric, this field specifies the name of the dataset associated + * with the metric. Only metrics associated with the specified dataset name will be considered for + * ordering. This field may only be set if ``field_name`` refers to a metric. + */ + @JsonProperty("dataset_name") + private String datasetName; + + /** The name of the field to order by, e.g. "metrics.accuracy". */ + @JsonProperty("field_name") + private String fieldName; + + public SearchLoggedModelsOrderBy setAscending(Boolean ascending) { + this.ascending = ascending; + return this; + } + + public Boolean getAscending() { + return ascending; + } + + public SearchLoggedModelsOrderBy setDatasetDigest(String datasetDigest) { + this.datasetDigest = datasetDigest; + return this; + } + + public String getDatasetDigest() { + return datasetDigest; + } + + public SearchLoggedModelsOrderBy setDatasetName(String datasetName) { + this.datasetName = datasetName; + return this; + } + + public String getDatasetName() { + return datasetName; + } + + public SearchLoggedModelsOrderBy setFieldName(String fieldName) { + this.fieldName = fieldName; + return this; + } + + public String getFieldName() { + return fieldName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchLoggedModelsOrderBy that = (SearchLoggedModelsOrderBy) o; + return Objects.equals(ascending, that.ascending) + && Objects.equals(datasetDigest, that.datasetDigest) + && 
Objects.equals(datasetName, that.datasetName) + && Objects.equals(fieldName, that.fieldName); + } + + @Override + public int hashCode() { + return Objects.hash(ascending, datasetDigest, datasetName, fieldName); + } + + @Override + public String toString() { + return new ToStringer(SearchLoggedModelsOrderBy.class) + .add("ascending", ascending) + .add("datasetDigest", datasetDigest) + .add("datasetName", datasetName) + .add("fieldName", fieldName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsRequest.java new file mode 100755 index 000000000..17ff5a9c4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsRequest.java @@ -0,0 +1,131 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class SearchLoggedModelsRequest { + /** + * List of datasets on which to apply the metrics filter clauses. For example, a filter with + * `metrics.accuracy > 0.9` and dataset info with name "test_dataset" means we will return all + * logged models with accuracy > 0.9 on the test_dataset. Metric values from ANY dataset matching + * the criteria are considered. If no datasets are specified, then metrics across all datasets are + * considered in the filter. + */ + @JsonProperty("datasets") + private Collection datasets; + + /** The IDs of the experiments in which to search for logged models. */ + @JsonProperty("experiment_ids") + private Collection experimentIds; + + /** + * A filter expression over logged model info and data that allows returning a subset of logged + * models. 
The syntax is a subset of SQL that supports AND'ing together binary operations. + * + *

Example: ``params.alpha < 0.3 AND metrics.accuracy > 0.9``. + */ + @JsonProperty("filter") + private String filter; + + /** The maximum number of Logged Models to return. The maximum limit is 50. */ + @JsonProperty("max_results") + private Long maxResults; + + /** The list of columns for ordering the results, with additional fields for sorting criteria. */ + @JsonProperty("order_by") + private Collection orderBy; + + /** The token indicating the page of logged models to fetch. */ + @JsonProperty("page_token") + private String pageToken; + + public SearchLoggedModelsRequest setDatasets(Collection datasets) { + this.datasets = datasets; + return this; + } + + public Collection getDatasets() { + return datasets; + } + + public SearchLoggedModelsRequest setExperimentIds(Collection experimentIds) { + this.experimentIds = experimentIds; + return this; + } + + public Collection getExperimentIds() { + return experimentIds; + } + + public SearchLoggedModelsRequest setFilter(String filter) { + this.filter = filter; + return this; + } + + public String getFilter() { + return filter; + } + + public SearchLoggedModelsRequest setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public SearchLoggedModelsRequest setOrderBy(Collection orderBy) { + this.orderBy = orderBy; + return this; + } + + public Collection getOrderBy() { + return orderBy; + } + + public SearchLoggedModelsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchLoggedModelsRequest that = (SearchLoggedModelsRequest) o; + return Objects.equals(datasets, that.datasets) + && Objects.equals(experimentIds, that.experimentIds) + && Objects.equals(filter, that.filter) + && 
Objects.equals(maxResults, that.maxResults) + && Objects.equals(orderBy, that.orderBy) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(datasets, experimentIds, filter, maxResults, orderBy, pageToken); + } + + @Override + public String toString() { + return new ToStringer(SearchLoggedModelsRequest.class) + .add("datasets", datasets) + .add("experimentIds", experimentIds) + .add("filter", filter) + .add("maxResults", maxResults) + .add("orderBy", orderBy) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsResponse.java new file mode 100755 index 000000000..c96b840ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsResponse.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class SearchLoggedModelsResponse { + /** Logged models that match the search criteria. */ + @JsonProperty("models") + private Collection models; + + /** The token that can be used to retrieve the next page of logged models. 
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + + public SearchLoggedModelsResponse setModels(Collection models) { + this.models = models; + return this; + } + + public Collection getModels() { + return models; + } + + public SearchLoggedModelsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchLoggedModelsResponse that = (SearchLoggedModelsResponse) o; + return Objects.equals(models, that.models) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(models, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(SearchLoggedModelsResponse.class) + .add("models", models) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequest.java new file mode 100755 index 000000000..244249d38 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequest.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class SetLoggedModelTagsRequest { + /** The ID of the logged model to set the tags on. */ + @JsonIgnore private String modelId; + + /** The tags to set on the logged model. 
*/ + @JsonProperty("tags") + private Collection tags; + + public SetLoggedModelTagsRequest setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public SetLoggedModelTagsRequest setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SetLoggedModelTagsRequest that = (SetLoggedModelTagsRequest) o; + return Objects.equals(modelId, that.modelId) && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, tags); + } + + @Override + public String toString() { + return new ToStringer(SetLoggedModelTagsRequest.class) + .add("modelId", modelId) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java new file mode 100755 index 000000000..924dacc20 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class SetLoggedModelTagsResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(SetLoggedModelTagsResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java index f8949b29f..18db0601f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java @@ -33,6 +33,17 @@ public class FederationPolicy { @JsonProperty("oidc_policy") private OidcFederationPolicy oidcPolicy; + /** The ID of the federation policy. */ + @JsonProperty("policy_id") + private String policyId; + + /** + * The service principal ID that this federation policy applies to. Only set for service principal + * federation policies. + */ + @JsonProperty("service_principal_id") + private Long servicePrincipalId; + /** Unique, immutable id of the federation policy. 
*/ @JsonProperty("uid") private String uid; @@ -77,6 +88,24 @@ public OidcFederationPolicy getOidcPolicy() { return oidcPolicy; } + public FederationPolicy setPolicyId(String policyId) { + this.policyId = policyId; + return this; + } + + public String getPolicyId() { + return policyId; + } + + public FederationPolicy setServicePrincipalId(Long servicePrincipalId) { + this.servicePrincipalId = servicePrincipalId; + return this; + } + + public Long getServicePrincipalId() { + return servicePrincipalId; + } + public FederationPolicy setUid(String uid) { this.uid = uid; return this; @@ -104,13 +133,16 @@ public boolean equals(Object o) { && Objects.equals(description, that.description) && Objects.equals(name, that.name) && Objects.equals(oidcPolicy, that.oidcPolicy) + && Objects.equals(policyId, that.policyId) + && Objects.equals(servicePrincipalId, that.servicePrincipalId) && Objects.equals(uid, that.uid) && Objects.equals(updateTime, that.updateTime); } @Override public int hashCode() { - return Objects.hash(createTime, description, name, oidcPolicy, uid, updateTime); + return Objects.hash( + createTime, description, name, oidcPolicy, policyId, servicePrincipalId, uid, updateTime); } @Override @@ -120,6 +152,8 @@ public String toString() { .add("description", description) .add("name", name) .add("oidcPolicy", oidcPolicy) + .add("policyId", policyId) + .add("servicePrincipalId", servicePrincipalId) .add("uid", uid) .add("updateTime", updateTime) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java index 6188069dc..7533ab6c4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java @@ -103,6 +103,14 @@ public class CreatePipeline { @JsonProperty("restart_window") private 
RestartWindow restartWindow; + /** + * Root path for this pipeline. This is used as the root directory when editing the pipeline in + * the Databricks user interface and it is added to sys.path when executing Python sources during + * pipeline execution. + */ + @JsonProperty("root_path") + private String rootPath; + /** * Write-only setting, available only in Create/Update calls. Specifies the user or service * principal that the pipeline runs as. If not specified, the pipeline runs as the user who @@ -327,6 +335,15 @@ public RestartWindow getRestartWindow() { return restartWindow; } + public CreatePipeline setRootPath(String rootPath) { + this.rootPath = rootPath; + return this; + } + + public String getRootPath() { + return rootPath; + } + public CreatePipeline setRunAs(RunAs runAs) { this.runAs = runAs; return this; @@ -407,6 +424,7 @@ public boolean equals(Object o) { && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) && Objects.equals(restartWindow, that.restartWindow) + && Objects.equals(rootPath, that.rootPath) && Objects.equals(runAs, that.runAs) && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) @@ -439,6 +457,7 @@ public int hashCode() { notifications, photon, restartWindow, + rootPath, runAs, schema, serverless, @@ -471,6 +490,7 @@ public String toString() { .add("notifications", notifications) .add("photon", photon) .add("restartWindow", restartWindow) + .add("rootPath", rootPath) .add("runAs", runAs) .add("schema", schema) .add("serverless", serverless) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java index 776105875..444759473 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java @@ -112,6 
+112,14 @@ public class EditPipeline { @JsonProperty("restart_window") private RestartWindow restartWindow; + /** + * Root path for this pipeline. This is used as the root directory when editing the pipeline in + * the Databricks user interface and it is added to sys.path when executing Python sources during + * pipeline execution. + */ + @JsonProperty("root_path") + private String rootPath; + /** * Write-only setting, available only in Create/Update calls. Specifies the user or service * principal that the pipeline runs as. If not specified, the pipeline runs as the user who @@ -345,6 +353,15 @@ public RestartWindow getRestartWindow() { return restartWindow; } + public EditPipeline setRootPath(String rootPath) { + this.rootPath = rootPath; + return this; + } + + public String getRootPath() { + return rootPath; + } + public EditPipeline setRunAs(RunAs runAs) { this.runAs = runAs; return this; @@ -426,6 +443,7 @@ public boolean equals(Object o) { && Objects.equals(photon, that.photon) && Objects.equals(pipelineId, that.pipelineId) && Objects.equals(restartWindow, that.restartWindow) + && Objects.equals(rootPath, that.rootPath) && Objects.equals(runAs, that.runAs) && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) @@ -459,6 +477,7 @@ public int hashCode() { photon, pipelineId, restartWindow, + rootPath, runAs, schema, serverless, @@ -492,6 +511,7 @@ public String toString() { .add("photon", photon) .add("pipelineId", pipelineId) .add("restartWindow", restartWindow) + .add("rootPath", rootPath) .add("runAs", runAs) .add("schema", schema) .add("serverless", serverless) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java index a20b36e95..913972a57 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java @@ -95,6 +95,14 @@ public class PipelineSpec { @JsonProperty("restart_window") private RestartWindow restartWindow; + /** + * Root path for this pipeline. This is used as the root directory when editing the pipeline in + * the Databricks user interface and it is added to sys.path when executing Python sources during + * pipeline execution. + */ + @JsonProperty("root_path") + private String rootPath; + /** The default schema (database) where tables are read from or published to. */ @JsonProperty("schema") private String schema; @@ -290,6 +298,15 @@ public RestartWindow getRestartWindow() { return restartWindow; } + public PipelineSpec setRootPath(String rootPath) { + this.rootPath = rootPath; + return this; + } + + public String getRootPath() { + return rootPath; + } + public PipelineSpec setSchema(String schema) { this.schema = schema; return this; @@ -359,6 +376,7 @@ public boolean equals(Object o) { && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) && Objects.equals(restartWindow, that.restartWindow) + && Objects.equals(rootPath, that.rootPath) && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) @@ -388,6 +406,7 @@ public int hashCode() { notifications, photon, restartWindow, + rootPath, schema, serverless, storage, @@ -417,6 +436,7 @@ public String toString() { .add("notifications", notifications) .add("photon", photon) .add("restartWindow", restartWindow) + .add("rootPath", rootPath) .add("schema", schema) .add("serverless", serverless) .add("storage", storage) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicy.java new file mode 100755 index 000000000..d2516835c --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicy.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AccountNetworkPolicy { + /** The associated account ID for this Network Policy object. */ + @JsonProperty("account_id") + private String accountId; + + /** The network policies applying for egress traffic. */ + @JsonProperty("egress") + private NetworkPolicyEgress egress; + + /** The unique identifier for the network policy. */ + @JsonProperty("network_policy_id") + private String networkPolicyId; + + public AccountNetworkPolicy setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public AccountNetworkPolicy setEgress(NetworkPolicyEgress egress) { + this.egress = egress; + return this; + } + + public NetworkPolicyEgress getEgress() { + return egress; + } + + public AccountNetworkPolicy setNetworkPolicyId(String networkPolicyId) { + this.networkPolicyId = networkPolicyId; + return this; + } + + public String getNetworkPolicyId() { + return networkPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AccountNetworkPolicy that = (AccountNetworkPolicy) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(egress, that.egress) + && Objects.equals(networkPolicyId, that.networkPolicyId); + } + + @Override + public int hashCode() { + return Objects.hash(accountId, egress, networkPolicyId); + } + + @Override + public String toString() { + return new ToStringer(AccountNetworkPolicy.class) + .add("accountId", accountId) + 
.add("egress", egress) + .add("networkPolicyId", networkPolicyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java new file mode 100755 index 000000000..8afee90dd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a network policy */ +@Generated +public class CreateNetworkPolicyRequest { + /** */ + @JsonProperty("network_policy") + private AccountNetworkPolicy networkPolicy; + + public CreateNetworkPolicyRequest setNetworkPolicy(AccountNetworkPolicy networkPolicy) { + this.networkPolicy = networkPolicy; + return this; + } + + public AccountNetworkPolicy getNetworkPolicy() { + return networkPolicy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateNetworkPolicyRequest that = (CreateNetworkPolicyRequest) o; + return Objects.equals(networkPolicy, that.networkPolicy); + } + + @Override + public int hashCode() { + return Objects.hash(networkPolicy); + } + + @Override + public String toString() { + return new ToStringer(CreateNetworkPolicyRequest.class) + .add("networkPolicy", networkPolicy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRequest.java new file mode 100755 index 000000000..cd646bba1 --- 
/dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a network policy */ +@Generated +public class DeleteNetworkPolicyRequest { + /** The unique identifier of the network policy to delete. */ + @JsonIgnore private String networkPolicyId; + + public DeleteNetworkPolicyRequest setNetworkPolicyId(String networkPolicyId) { + this.networkPolicyId = networkPolicyId; + return this; + } + + public String getNetworkPolicyId() { + return networkPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteNetworkPolicyRequest that = (DeleteNetworkPolicyRequest) o; + return Objects.equals(networkPolicyId, that.networkPolicyId); + } + + @Override + public int hashCode() { + return Objects.hash(networkPolicyId); + } + + @Override + public String toString() { + return new ToStringer(DeleteNetworkPolicyRequest.class) + .add("networkPolicyId", networkPolicyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRpcResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRpcResponse.java new file mode 100755 index 000000000..5d161f110 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkPolicyRpcResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class DeleteNetworkPolicyRpcResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteNetworkPolicyRpcResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java index 821acb2b5..097e6c61b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java @@ -7,8 +7,14 @@ import org.slf4j.LoggerFactory; /** - * When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation - * of new mounts). When the setting is off, all DBFS functionality is enabled + * Disabling legacy DBFS has the following implications: + * + *

1. Access to DBFS root and DBFS mounts is disallowed (as well as the creation of new mounts). + * 2. Disables Databricks Runtime versions prior to 13.3LTS. + * + *

When the setting is off, all DBFS functionality is enabled and no restrictions are imposed on + * Databricks Runtime versions. This setting can take up to 20 minutes to take effect and requires a + * manual restart of all-purpose compute clusters and SQL warehouses. */ @Generated public class DisableLegacyDbfsAPI { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsService.java index 6a4aa74fe..28e7572cf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsService.java @@ -4,8 +4,14 @@ import com.databricks.sdk.support.Generated; /** - * When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation - * of new mounts). When the setting is off, all DBFS functionality is enabled + * Disabling legacy DBFS has the following implications: + * + *

1. Access to DBFS root and DBFS mounts is disallowed (as well as the creation of new mounts). + * 2. Disables Databricks Runtime versions prior to 13.3LTS. + * + *

When the setting is off, all DBFS functionality is enabled and no restrictions are imposed on + * Databricks Runtime versions. This setting can take up to 20 minutes to take effect and requires a + * manual restart of all-purpose compute clusters and SQL warehouses. * *

This is the high-level interface, that contains generated methods. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java new file mode 100755 index 000000000..886b66041 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java @@ -0,0 +1,110 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class EgressNetworkPolicyNetworkAccessPolicy { + /** + * List of internet destinations that serverless workloads are allowed to access when in + * RESTRICTED_ACCESS mode. + */ + @JsonProperty("allowed_internet_destinations") + private Collection + allowedInternetDestinations; + + /** + * List of storage destinations that serverless workloads are allowed to access when in + * RESTRICTED_ACCESS mode. + */ + @JsonProperty("allowed_storage_destinations") + private Collection + allowedStorageDestinations; + + /** Optional. When policy_enforcement is not provided, we default to ENFORCE_MODE_ALL_SERVICES */ + @JsonProperty("policy_enforcement") + private EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement policyEnforcement; + + /** The restriction mode that controls how serverless workloads can access the internet. 
*/ + @JsonProperty("restriction_mode") + private EgressNetworkPolicyNetworkAccessPolicyRestrictionMode restrictionMode; + + public EgressNetworkPolicyNetworkAccessPolicy setAllowedInternetDestinations( + Collection + allowedInternetDestinations) { + this.allowedInternetDestinations = allowedInternetDestinations; + return this; + } + + public Collection + getAllowedInternetDestinations() { + return allowedInternetDestinations; + } + + public EgressNetworkPolicyNetworkAccessPolicy setAllowedStorageDestinations( + Collection + allowedStorageDestinations) { + this.allowedStorageDestinations = allowedStorageDestinations; + return this; + } + + public Collection + getAllowedStorageDestinations() { + return allowedStorageDestinations; + } + + public EgressNetworkPolicyNetworkAccessPolicy setPolicyEnforcement( + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement policyEnforcement) { + this.policyEnforcement = policyEnforcement; + return this; + } + + public EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement getPolicyEnforcement() { + return policyEnforcement; + } + + public EgressNetworkPolicyNetworkAccessPolicy setRestrictionMode( + EgressNetworkPolicyNetworkAccessPolicyRestrictionMode restrictionMode) { + this.restrictionMode = restrictionMode; + return this; + } + + public EgressNetworkPolicyNetworkAccessPolicyRestrictionMode getRestrictionMode() { + return restrictionMode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyNetworkAccessPolicy that = (EgressNetworkPolicyNetworkAccessPolicy) o; + return Objects.equals(allowedInternetDestinations, that.allowedInternetDestinations) + && Objects.equals(allowedStorageDestinations, that.allowedStorageDestinations) + && Objects.equals(policyEnforcement, that.policyEnforcement) + && Objects.equals(restrictionMode, that.restrictionMode); + } + + @Override + public int hashCode() { + return Objects.hash( + 
allowedInternetDestinations, + allowedStorageDestinations, + policyEnforcement, + restrictionMode); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyNetworkAccessPolicy.class) + .add("allowedInternetDestinations", allowedInternetDestinations) + .add("allowedStorageDestinations", allowedStorageDestinations) + .add("policyEnforcement", policyEnforcement) + .add("restrictionMode", restrictionMode) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestination.java new file mode 100755 index 000000000..4fbbea293 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestination.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Users can specify accessible internet destinations when outbound access is restricted. We only + * support DNS_NAME (FQDN format) destinations for the time being. Going forward we may extend + * support to host names and IP addresses. + */ +@Generated +public class EgressNetworkPolicyNetworkAccessPolicyInternetDestination { + /** + * The internet destination to which access will be allowed. Format dependent on the destination + * type. + */ + @JsonProperty("destination") + private String destination; + + /** The type of internet destination. Currently only DNS_NAME is supported. 
*/ + @JsonProperty("internet_destination_type") + private EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType + internetDestinationType; + + public EgressNetworkPolicyNetworkAccessPolicyInternetDestination setDestination( + String destination) { + this.destination = destination; + return this; + } + + public String getDestination() { + return destination; + } + + public EgressNetworkPolicyNetworkAccessPolicyInternetDestination setInternetDestinationType( + EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType + internetDestinationType) { + this.internetDestinationType = internetDestinationType; + return this; + } + + public EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType + getInternetDestinationType() { + return internetDestinationType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyNetworkAccessPolicyInternetDestination that = + (EgressNetworkPolicyNetworkAccessPolicyInternetDestination) o; + return Objects.equals(destination, that.destination) + && Objects.equals(internetDestinationType, that.internetDestinationType); + } + + @Override + public int hashCode() { + return Objects.hash(destination, internetDestinationType); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyNetworkAccessPolicyInternetDestination.class) + .add("destination", destination) + .add("internetDestinationType", internetDestinationType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType.java new file mode 100755 index 000000000..e521762a8 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType.java @@ -0,0 +1,10 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType { + DNS_NAME, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement.java new file mode 100755 index 000000000..feacf01a5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement { + /** + * When empty, it means dry run for all products. When non-empty, it means dry run for specific + * products and for the other products, they will run in enforced mode. + */ + @JsonProperty("dry_run_mode_product_filter") + private Collection + dryRunModeProductFilter; + + /** + * The mode of policy enforcement. ENFORCED blocks traffic that violates policy, while DRY_RUN + * only logs violations without blocking. When not specified, defaults to ENFORCED. 
+ */ + @JsonProperty("enforcement_mode") + private EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode enforcementMode; + + public EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement setDryRunModeProductFilter( + Collection + dryRunModeProductFilter) { + this.dryRunModeProductFilter = dryRunModeProductFilter; + return this; + } + + public Collection + getDryRunModeProductFilter() { + return dryRunModeProductFilter; + } + + public EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement setEnforcementMode( + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode enforcementMode) { + this.enforcementMode = enforcementMode; + return this; + } + + public EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode + getEnforcementMode() { + return enforcementMode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement that = + (EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement) o; + return Objects.equals(dryRunModeProductFilter, that.dryRunModeProductFilter) + && Objects.equals(enforcementMode, that.enforcementMode); + } + + @Override + public int hashCode() { + return Objects.hash(dryRunModeProductFilter, enforcementMode); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement.class) + .add("dryRunModeProductFilter", dryRunModeProductFilter) + .add("enforcementMode", enforcementMode) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter.java new file mode 100755 index 000000000..1a9687a5f --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** The values should match the list of workloads used in networkconfig.proto */ +@Generated +public enum EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter { + DBSQL, + ML_SERVING, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode.java new file mode 100755 index 000000000..018aa87c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode { + DRY_RUN, + ENFORCED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyRestrictionMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyRestrictionMode.java new file mode 100755 index 000000000..5e5182620 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyRestrictionMode.java @@ -0,0 +1,17 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: + * Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can + * only access explicitly allowed internet and storage destinations, as well as UC connections and + * external locations. + */ +@Generated +public enum EgressNetworkPolicyNetworkAccessPolicyRestrictionMode { + FULL_ACCESS, + RESTRICTED_ACCESS, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestination.java new file mode 100755 index 000000000..8ea733247 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestination.java @@ -0,0 +1,113 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Users can specify accessible storage destinations. */ +@Generated +public class EgressNetworkPolicyNetworkAccessPolicyStorageDestination { + /** The Azure storage account name. */ + @JsonProperty("azure_storage_account") + private String azureStorageAccount; + + /** The Azure storage service type (blob, dfs, etc.). */ + @JsonProperty("azure_storage_service") + private String azureStorageService; + + /** */ + @JsonProperty("bucket_name") + private String bucketName; + + /** The region of the S3 bucket. */ + @JsonProperty("region") + private String region; + + /** The type of storage destination. 
*/ + @JsonProperty("storage_destination_type") + private EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType + storageDestinationType; + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestination setAzureStorageAccount( + String azureStorageAccount) { + this.azureStorageAccount = azureStorageAccount; + return this; + } + + public String getAzureStorageAccount() { + return azureStorageAccount; + } + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestination setAzureStorageService( + String azureStorageService) { + this.azureStorageService = azureStorageService; + return this; + } + + public String getAzureStorageService() { + return azureStorageService; + } + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestination setBucketName(String bucketName) { + this.bucketName = bucketName; + return this; + } + + public String getBucketName() { + return bucketName; + } + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestination setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestination setStorageDestinationType( + EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType + storageDestinationType) { + this.storageDestinationType = storageDestinationType; + return this; + } + + public EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType + getStorageDestinationType() { + return storageDestinationType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EgressNetworkPolicyNetworkAccessPolicyStorageDestination that = + (EgressNetworkPolicyNetworkAccessPolicyStorageDestination) o; + return Objects.equals(azureStorageAccount, that.azureStorageAccount) + && Objects.equals(azureStorageService, that.azureStorageService) + && Objects.equals(bucketName, 
that.bucketName) + && Objects.equals(region, that.region) + && Objects.equals(storageDestinationType, that.storageDestinationType); + } + + @Override + public int hashCode() { + return Objects.hash( + azureStorageAccount, azureStorageService, bucketName, region, storageDestinationType); + } + + @Override + public String toString() { + return new ToStringer(EgressNetworkPolicyNetworkAccessPolicyStorageDestination.class) + .add("azureStorageAccount", azureStorageAccount) + .add("azureStorageService", azureStorageService) + .add("bucketName", bucketName) + .add("region", region) + .add("storageDestinationType", storageDestinationType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType.java new file mode 100755 index 000000000..5f2984886 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType { + AWS_S3, + AZURE_STORAGE, + GOOGLE_CLOUD_STORAGE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkPolicyRequest.java new file mode 100755 index 000000000..754bd62c0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkPolicyRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a network policy */ +@Generated +public class GetNetworkPolicyRequest { + /** The unique identifier of the network policy to retrieve. 
*/ + @JsonIgnore private String networkPolicyId; + + public GetNetworkPolicyRequest setNetworkPolicyId(String networkPolicyId) { + this.networkPolicyId = networkPolicyId; + return this; + } + + public String getNetworkPolicyId() { + return networkPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetNetworkPolicyRequest that = (GetNetworkPolicyRequest) o; + return Objects.equals(networkPolicyId, that.networkPolicyId); + } + + @Override + public int hashCode() { + return Objects.hash(networkPolicyId); + } + + @Override + public String toString() { + return new ToStringer(GetNetworkPolicyRequest.class) + .add("networkPolicyId", networkPolicyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetWorkspaceNetworkOptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetWorkspaceNetworkOptionRequest.java new file mode 100755 index 000000000..5344f3325 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetWorkspaceNetworkOptionRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get workspace network configuration */ +@Generated +public class GetWorkspaceNetworkOptionRequest { + /** The workspace ID. 
*/ + @JsonIgnore private Long workspaceId; + + public GetWorkspaceNetworkOptionRequest setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWorkspaceNetworkOptionRequest that = (GetWorkspaceNetworkOptionRequest) o; + return Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(workspaceId); + } + + @Override + public String toString() { + return new ToStringer(GetWorkspaceNetworkOptionRequest.class) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequest.java new file mode 100755 index 000000000..963c735cc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequest.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List network policies */ +@Generated +public class ListNetworkPoliciesRequest { + /** Pagination token to go to next page based on previous query. 
*/ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListNetworkPoliciesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNetworkPoliciesRequest that = (ListNetworkPoliciesRequest) o; + return Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListNetworkPoliciesRequest.class).add("pageToken", pageToken).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesResponse.java new file mode 100755 index 000000000..5574a6dc4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesResponse.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListNetworkPoliciesResponse { + /** List of network policies. */ + @JsonProperty("items") + private Collection items; + + /** + * A token that can be used to get the next page of results. If null, there are no more results to + * show. 
+ */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListNetworkPoliciesResponse setItems(Collection items) { + this.items = items; + return this; + } + + public Collection getItems() { + return items; + } + + public ListNetworkPoliciesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNetworkPoliciesResponse that = (ListNetworkPoliciesResponse) o; + return Objects.equals(items, that.items) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(items, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListNetworkPoliciesResponse.class) + .add("items", items) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesAPI.java new file mode 100755 index 000000000..effba4aad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesAPI.java @@ -0,0 +1,113 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * These APIs manage network policies for this account. Network policies control which network + * destinations can be accessed from the Databricks environment. Each Databricks account includes a + * default policy named 'default-policy'. 
'default-policy' is associated with any workspace lacking + * an explicit network policy assignment, and is automatically associated with each newly created + * workspace. 'default-policy' is reserved and cannot be deleted, but it can be updated to customize + * the default network access rules for your account. + */ +@Generated +public class NetworkPoliciesAPI { + private static final Logger LOG = LoggerFactory.getLogger(NetworkPoliciesAPI.class); + + private final NetworkPoliciesService impl; + + /** Regular-use constructor */ + public NetworkPoliciesAPI(ApiClient apiClient) { + impl = new NetworkPoliciesImpl(apiClient); + } + + /** Constructor for mocks */ + public NetworkPoliciesAPI(NetworkPoliciesService mock) { + impl = mock; + } + + public AccountNetworkPolicy createNetworkPolicyRpc(AccountNetworkPolicy networkPolicy) { + return createNetworkPolicyRpc(new CreateNetworkPolicyRequest().setNetworkPolicy(networkPolicy)); + } + + /** + * Create a network policy. + * + *

Creates a new network policy to manage which network destinations can be accessed from the + * Databricks environment. + */ + public AccountNetworkPolicy createNetworkPolicyRpc(CreateNetworkPolicyRequest request) { + return impl.createNetworkPolicyRpc(request); + } + + public void deleteNetworkPolicyRpc(String networkPolicyId) { + deleteNetworkPolicyRpc(new DeleteNetworkPolicyRequest().setNetworkPolicyId(networkPolicyId)); + } + + /** + * Delete a network policy. + * + *

Deletes a network policy. Cannot be called on 'default-policy'. + */ + public void deleteNetworkPolicyRpc(DeleteNetworkPolicyRequest request) { + impl.deleteNetworkPolicyRpc(request); + } + + public AccountNetworkPolicy getNetworkPolicyRpc(String networkPolicyId) { + return getNetworkPolicyRpc(new GetNetworkPolicyRequest().setNetworkPolicyId(networkPolicyId)); + } + + /** + * Get a network policy. + * + *

Gets a network policy. + */ + public AccountNetworkPolicy getNetworkPolicyRpc(GetNetworkPolicyRequest request) { + return impl.getNetworkPolicyRpc(request); + } + + /** + * List network policies. + * + *

Gets an array of network policies. + */ + public Iterable listNetworkPoliciesRpc(ListNetworkPoliciesRequest request) { + return new Paginator<>( + request, + impl::listNetworkPoliciesRpc, + ListNetworkPoliciesResponse::getItems, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public AccountNetworkPolicy updateNetworkPolicyRpc( + String networkPolicyId, AccountNetworkPolicy networkPolicy) { + return updateNetworkPolicyRpc( + new UpdateNetworkPolicyRequest() + .setNetworkPolicyId(networkPolicyId) + .setNetworkPolicy(networkPolicy)); + } + + /** + * Update a network policy. + * + *

Updates a network policy. This allows you to modify the configuration of a network policy. + */ + public AccountNetworkPolicy updateNetworkPolicyRpc(UpdateNetworkPolicyRequest request) { + return impl.updateNetworkPolicyRpc(request); + } + + public NetworkPoliciesService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java new file mode 100755 index 000000000..1f9f29054 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java @@ -0,0 +1,96 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of NetworkPolicies */ +@Generated +class NetworkPoliciesImpl implements NetworkPoliciesService { + private final ApiClient apiClient; + + public NetworkPoliciesImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public AccountNetworkPolicy createNetworkPolicyRpc(CreateNetworkPolicyRequest request) { + String path = + String.format("/api/2.0/accounts/%s/network-policies", apiClient.configuredAccountID()); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getNetworkPolicy())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, AccountNetworkPolicy.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteNetworkPolicyRpc(DeleteNetworkPolicyRequest request) { + 
String path = + String.format( + "/api/2.0/accounts/%s/network-policies/%s", + apiClient.configuredAccountID(), request.getNetworkPolicyId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteNetworkPolicyRpcResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public AccountNetworkPolicy getNetworkPolicyRpc(GetNetworkPolicyRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/network-policies/%s", + apiClient.configuredAccountID(), request.getNetworkPolicyId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, AccountNetworkPolicy.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListNetworkPoliciesResponse listNetworkPoliciesRpc(ListNetworkPoliciesRequest request) { + String path = + String.format("/api/2.0/accounts/%s/network-policies", apiClient.configuredAccountID()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListNetworkPoliciesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public AccountNetworkPolicy updateNetworkPolicyRpc(UpdateNetworkPolicyRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/network-policies/%s", + apiClient.configuredAccountID(), request.getNetworkPolicyId()); + try { + Request req = new Request("PUT", path, apiClient.serialize(request.getNetworkPolicy())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); 
+ return apiClient.execute(req, AccountNetworkPolicy.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesService.java new file mode 100755 index 000000000..17cf556d1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesService.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * These APIs manage network policies for this account. Network policies control which network + * destinations can be accessed from the Databricks environment. Each Databricks account includes a + * default policy named 'default-policy'. 'default-policy' is associated with any workspace lacking + * an explicit network policy assignment, and is automatically associated with each newly created + * workspace. 'default-policy' is reserved and cannot be deleted, but it can be updated to customize + * the default network access rules for your account. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface NetworkPoliciesService { + /** + * Create a network policy. + * + *

Creates a new network policy to manage which network destinations can be accessed from the + * Databricks environment. + */ + AccountNetworkPolicy createNetworkPolicyRpc( + CreateNetworkPolicyRequest createNetworkPolicyRequest); + + /** + * Delete a network policy. + * + *

Deletes a network policy. Cannot be called on 'default-policy'. + */ + void deleteNetworkPolicyRpc(DeleteNetworkPolicyRequest deleteNetworkPolicyRequest); + + /** + * Get a network policy. + * + *

Gets a network policy. + */ + AccountNetworkPolicy getNetworkPolicyRpc(GetNetworkPolicyRequest getNetworkPolicyRequest); + + /** + * List network policies. + * + *

Gets an array of network policies. + */ + ListNetworkPoliciesResponse listNetworkPoliciesRpc( + ListNetworkPoliciesRequest listNetworkPoliciesRequest); + + /** + * Update a network policy. + * + *

Updates a network policy. This allows you to modify the configuration of a network policy. + */ + AccountNetworkPolicy updateNetworkPolicyRpc( + UpdateNetworkPolicyRequest updateNetworkPolicyRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPolicyEgress.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPolicyEgress.java new file mode 100755 index 000000000..a3b0dea02 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPolicyEgress.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * The network policies applying for egress traffic. This message is used by the UI/REST API. We + * translate this message to the format expected by the dataplane in Lakehouse Network Manager (for + * the format expected by the dataplane, see networkconfig.textproto). This policy should be + * consistent with [[com.databricks.api.proto.settingspolicy.EgressNetworkPolicy]]. Details see + * API-design: https://docs.google.com/document/d/1DKWO_FpZMCY4cF2O62LpwII1lx8gsnDGG-qgE3t3TOA/ + */ +@Generated +public class NetworkPolicyEgress { + /** The access policy enforced for egress traffic to the internet. 
*/ + @JsonProperty("network_access") + private EgressNetworkPolicyNetworkAccessPolicy networkAccess; + + public NetworkPolicyEgress setNetworkAccess( + EgressNetworkPolicyNetworkAccessPolicy networkAccess) { + this.networkAccess = networkAccess; + return this; + } + + public EgressNetworkPolicyNetworkAccessPolicy getNetworkAccess() { + return networkAccess; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NetworkPolicyEgress that = (NetworkPolicyEgress) o; + return Objects.equals(networkAccess, that.networkAccess); + } + + @Override + public int hashCode() { + return Objects.hash(networkAccess); + } + + @Override + public String toString() { + return new ToStringer(NetworkPolicyEgress.class).add("networkAccess", networkAccess).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java index b0fba4f64..f5eb3d0a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java @@ -112,10 +112,7 @@ public DisableLegacyAccessAPI DisableLegacyAccess() { return disableLegacyAccessAPI; } - /** - * When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation - * of new mounts). - */ + /** Disabling legacy DBFS has the following implications: 1. 
*/ public DisableLegacyDbfsAPI DisableLegacyDbfs() { return disableLegacyDbfsAPI; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java new file mode 100755 index 000000000..82fb015bd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update a network policy */ +@Generated +public class UpdateNetworkPolicyRequest { + /** */ + @JsonProperty("network_policy") + private AccountNetworkPolicy networkPolicy; + + /** The unique identifier for the network policy. 
*/ + @JsonIgnore private String networkPolicyId; + + public UpdateNetworkPolicyRequest setNetworkPolicy(AccountNetworkPolicy networkPolicy) { + this.networkPolicy = networkPolicy; + return this; + } + + public AccountNetworkPolicy getNetworkPolicy() { + return networkPolicy; + } + + public UpdateNetworkPolicyRequest setNetworkPolicyId(String networkPolicyId) { + this.networkPolicyId = networkPolicyId; + return this; + } + + public String getNetworkPolicyId() { + return networkPolicyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateNetworkPolicyRequest that = (UpdateNetworkPolicyRequest) o; + return Objects.equals(networkPolicy, that.networkPolicy) + && Objects.equals(networkPolicyId, that.networkPolicyId); + } + + @Override + public int hashCode() { + return Objects.hash(networkPolicy, networkPolicyId); + } + + @Override + public String toString() { + return new ToStringer(UpdateNetworkPolicyRequest.class) + .add("networkPolicy", networkPolicy) + .add("networkPolicyId", networkPolicyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java new file mode 100755 index 000000000..84c39c6b0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
package com.databricks.sdk.service.settings;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Update workspace network configuration */
@Generated
public class UpdateWorkspaceNetworkOptionRequest {
  /** The workspace ID. Excluded from the serialized JSON body (see {@code @JsonIgnore}). */
  @JsonIgnore private Long workspaceId;

  /** The workspace network option carried in the request body. */
  @JsonProperty("workspace_network_option")
  private WorkspaceNetworkOption workspaceNetworkOption;

  public UpdateWorkspaceNetworkOptionRequest setWorkspaceId(Long workspaceId) {
    this.workspaceId = workspaceId;
    return this;
  }

  public Long getWorkspaceId() {
    return workspaceId;
  }

  public UpdateWorkspaceNetworkOptionRequest setWorkspaceNetworkOption(
      WorkspaceNetworkOption workspaceNetworkOption) {
    this.workspaceNetworkOption = workspaceNetworkOption;
    return this;
  }

  public WorkspaceNetworkOption getWorkspaceNetworkOption() {
    return workspaceNetworkOption;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    UpdateWorkspaceNetworkOptionRequest that = (UpdateWorkspaceNetworkOptionRequest) o;
    return Objects.equals(workspaceId, that.workspaceId)
        && Objects.equals(workspaceNetworkOption, that.workspaceNetworkOption);
  }

  @Override
  public int hashCode() {
    return Objects.hash(workspaceId, workspaceNetworkOption);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateWorkspaceNetworkOptionRequest.class)
        .add("workspaceId", workspaceId)
        .add("workspaceNetworkOption", workspaceNetworkOption)
        .toString();
  }
}
// ==== databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationAPI.java ====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.settings;

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * These APIs allow configuration of network settings for Databricks workspaces. Each workspace is
 * always associated with exactly one network policy that controls which network destinations can be
 * accessed from the Databricks environment. By default, workspaces are associated with the
 * 'default-policy' network policy. You cannot create or delete a workspace's network configuration,
 * only update it to associate the workspace with a different policy.
 */
@Generated
public class WorkspaceNetworkConfigurationAPI {
  private static final Logger LOG = LoggerFactory.getLogger(WorkspaceNetworkConfigurationAPI.class);

  private final WorkspaceNetworkConfigurationService impl;

  /** Regular-use constructor */
  public WorkspaceNetworkConfigurationAPI(ApiClient apiClient) {
    impl = new WorkspaceNetworkConfigurationImpl(apiClient);
  }

  /** Constructor for mocks */
  public WorkspaceNetworkConfigurationAPI(WorkspaceNetworkConfigurationService mock) {
    impl = mock;
  }

  /** Convenience overload: builds the request object from the workspace ID. */
  public WorkspaceNetworkOption getWorkspaceNetworkOptionRpc(long workspaceId) {
    return getWorkspaceNetworkOptionRpc(
        new GetWorkspaceNetworkOptionRequest().setWorkspaceId(workspaceId));
  }

  /**
   * Get workspace network configuration.
   *
   * <p>Gets the network configuration for a workspace. Every workspace has exactly one network
   * policy binding, with 'default-policy' used if no explicit assignment exists.
   */
  public WorkspaceNetworkOption getWorkspaceNetworkOptionRpc(
      GetWorkspaceNetworkOptionRequest request) {
    return impl.getWorkspaceNetworkOptionRpc(request);
  }

  /** Convenience overload: builds the request object from the workspace ID and option. */
  public WorkspaceNetworkOption updateWorkspaceNetworkOptionRpc(
      long workspaceId, WorkspaceNetworkOption workspaceNetworkOption) {
    return updateWorkspaceNetworkOptionRpc(
        new UpdateWorkspaceNetworkOptionRequest()
            .setWorkspaceId(workspaceId)
            .setWorkspaceNetworkOption(workspaceNetworkOption));
  }

  /**
   * Update workspace network configuration.
   *
   * <p>Updates the network configuration for a workspace. This operation associates the workspace
   * with the specified network policy. To revert to the default policy, specify 'default-policy' as
   * the network_policy_id.
   */
  public WorkspaceNetworkOption updateWorkspaceNetworkOptionRpc(
      UpdateWorkspaceNetworkOptionRequest request) {
    return impl.updateWorkspaceNetworkOptionRpc(request);
  }

  public WorkspaceNetworkConfigurationService impl() {
    return impl;
  }
}

// ==== databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationImpl.java ====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.settings;

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.core.http.Request;
import com.databricks.sdk.support.Generated;
import java.io.IOException;

/** Package-local implementation of WorkspaceNetworkConfiguration */
@Generated
class WorkspaceNetworkConfigurationImpl implements WorkspaceNetworkConfigurationService {
  private final ApiClient apiClient;

  public WorkspaceNetworkConfigurationImpl(ApiClient apiClient) {
    this.apiClient = apiClient;
  }

  @Override
  public WorkspaceNetworkOption getWorkspaceNetworkOptionRpc(
      GetWorkspaceNetworkOptionRequest request) {
    String path =
        String.format(
            "/api/2.0/accounts/%s/workspaces/%s/network",
            apiClient.configuredAccountID(), request.getWorkspaceId());
    try {
      Request req = new Request("GET", path);
      ApiClient.setQuery(req, request);
      req.withHeader("Accept", "application/json");
      return apiClient.execute(req, WorkspaceNetworkOption.class);
    } catch (IOException e) {
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }

  @Override
  public WorkspaceNetworkOption updateWorkspaceNetworkOptionRpc(
      UpdateWorkspaceNetworkOptionRequest request) {
    String path =
        String.format(
            "/api/2.0/accounts/%s/workspaces/%s/network",
            apiClient.configuredAccountID(), request.getWorkspaceId());
    try {
      // PUT with the workspace network option serialized as the request body.
      Request req =
          new Request("PUT", path, apiClient.serialize(request.getWorkspaceNetworkOption()));
      ApiClient.setQuery(req, request);
      req.withHeader("Accept", "application/json");
      req.withHeader("Content-Type", "application/json");
      return apiClient.execute(req, WorkspaceNetworkOption.class);
    } catch (IOException e) {
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }
}

// ==== databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationService.java ====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.settings;

import com.databricks.sdk.support.Generated;

/**
 * These APIs allow configuration of network settings for Databricks workspaces. Each workspace is
 * always associated with exactly one network policy that controls which network destinations can be
 * accessed from the Databricks environment. By default, workspaces are associated with the
 * 'default-policy' network policy. You cannot create or delete a workspace's network configuration,
 * only update it to associate the workspace with a different policy.
 *
 * <p>This is the high-level interface, that contains generated methods.
 *
 * <p>Evolving: this interface is under development. Method signatures may change.
 */
@Generated
public interface WorkspaceNetworkConfigurationService {
  /**
   * Get workspace network configuration.
   *
   * <p>Gets the network configuration for a workspace. Every workspace has exactly one network
   * policy binding, with 'default-policy' used if no explicit assignment exists.
   */
  WorkspaceNetworkOption getWorkspaceNetworkOptionRpc(
      GetWorkspaceNetworkOptionRequest getWorkspaceNetworkOptionRequest);

  /**
   * Update workspace network configuration.
   *
   * <p>Updates the network configuration for a workspace. This operation associates the workspace
   * with the specified network policy. To revert to the default policy, specify 'default-policy' as
   * the network_policy_id.
   */
  WorkspaceNetworkOption updateWorkspaceNetworkOptionRpc(
      UpdateWorkspaceNetworkOptionRequest updateWorkspaceNetworkOptionRequest);
}

// ==== databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkOption.java ====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.settings;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

@Generated
public class WorkspaceNetworkOption {
  /**
   * The network policy ID to apply to the workspace. This controls the network access rules for all
   * serverless compute resources in the workspace. Each workspace can only be linked to one policy
   * at a time. If no policy is explicitly assigned, the workspace will use 'default-policy'.
   */
  @JsonProperty("network_policy_id")
  private String networkPolicyId;

  /** The workspace ID. */
  @JsonProperty("workspace_id")
  private Long workspaceId;

  public WorkspaceNetworkOption setNetworkPolicyId(String networkPolicyId) {
    this.networkPolicyId = networkPolicyId;
    return this;
  }

  public String getNetworkPolicyId() {
    return networkPolicyId;
  }

  public WorkspaceNetworkOption setWorkspaceId(Long workspaceId) {
    this.workspaceId = workspaceId;
    return this;
  }

  public Long getWorkspaceId() {
    return workspaceId;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    WorkspaceNetworkOption that = (WorkspaceNetworkOption) o;
    return Objects.equals(networkPolicyId, that.networkPolicyId)
        && Objects.equals(workspaceId, that.workspaceId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(networkPolicyId, workspaceId);
  }

  @Override
  public String toString() {
    return new ToStringer(WorkspaceNetworkOption.class)
        .add("networkPolicyId", networkPolicyId)
        .add("workspaceId", workspaceId)
        .toString();
  }
}

// ==== databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java ====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.sharing;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Create recipient federation policy */
@Generated
public class CreateFederationPolicyRequest {
  /** The federation policy carried in the request body. */
  @JsonProperty("policy")
  private FederationPolicy policy;

  /**
   * Name of the recipient. This is the name of the recipient for which the policy is being created.
   * Excluded from the serialized JSON body (see {@code @JsonIgnore}).
   */
  @JsonIgnore private String recipientName;

  public CreateFederationPolicyRequest setPolicy(FederationPolicy policy) {
    this.policy = policy;
    return this;
  }

  public FederationPolicy getPolicy() {
    return policy;
  }

  public CreateFederationPolicyRequest setRecipientName(String recipientName) {
    this.recipientName = recipientName;
    return this;
  }

  public String getRecipientName() {
    return recipientName;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    CreateFederationPolicyRequest that = (CreateFederationPolicyRequest) o;
    return Objects.equals(policy, that.policy) && Objects.equals(recipientName, that.recipientName);
  }

  @Override
  public int hashCode() {
    return Objects.hash(policy, recipientName);
  }

  @Override
  public String toString() {
    return new ToStringer(CreateFederationPolicyRequest.class)
        .add("policy", policy)
        .add("recipientName", recipientName)
        .toString();
  }
}
// ==== databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteFederationPolicyRequest.java ====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.sharing;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Delete recipient federation policy */
@Generated
public class DeleteFederationPolicyRequest {
  /** Name of the policy. This is the name of the policy to be deleted. */
  @JsonIgnore private String name;

  /**
   * Name of the recipient. This is the name of the recipient for which the policy is being deleted.
   */
  @JsonIgnore private String recipientName;

  public DeleteFederationPolicyRequest setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  public DeleteFederationPolicyRequest setRecipientName(String recipientName) {
    this.recipientName = recipientName;
    return this;
  }

  public String getRecipientName() {
    return recipientName;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    DeleteFederationPolicyRequest that = (DeleteFederationPolicyRequest) o;
    return Objects.equals(name, that.name) && Objects.equals(recipientName, that.recipientName);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name, recipientName);
  }

  @Override
  public String toString() {
    return new ToStringer(DeleteFederationPolicyRequest.class)
        .add("name", name)
        .add("recipientName", recipientName)
        .toString();
  }
}
// ==== databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FederationPolicy.java ====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.sharing;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

@Generated
public class FederationPolicy {
  /** Description of the policy. This is a user-provided description. */
  @JsonProperty("comment")
  private String comment;

  /** System-generated timestamp indicating when the policy was created. */
  @JsonProperty("create_time")
  private String createTime;

  /** Unique, immutable system-generated identifier for the federation policy. */
  @JsonProperty("id")
  private String id;

  /**
   * Name of the federation policy. A recipient can have multiple policies with different names. The
   * name must contain only lowercase alphanumeric characters, numbers, and hyphens.
   */
  @JsonProperty("name")
  private String name;

  /** Specifies the policy to use for validating OIDC claims in the federated tokens. */
  @JsonProperty("oidc_policy")
  private OidcFederationPolicy oidcPolicy;

  /** System-generated timestamp indicating when the policy was last updated. */
  @JsonProperty("update_time")
  private String updateTime;

  public FederationPolicy setComment(String comment) {
    this.comment = comment;
    return this;
  }

  public String getComment() {
    return comment;
  }

  public FederationPolicy setCreateTime(String createTime) {
    this.createTime = createTime;
    return this;
  }

  public String getCreateTime() {
    return createTime;
  }

  public FederationPolicy setId(String id) {
    this.id = id;
    return this;
  }

  public String getId() {
    return id;
  }

  public FederationPolicy setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  public FederationPolicy setOidcPolicy(OidcFederationPolicy oidcPolicy) {
    this.oidcPolicy = oidcPolicy;
    return this;
  }

  public OidcFederationPolicy getOidcPolicy() {
    return oidcPolicy;
  }

  public FederationPolicy setUpdateTime(String updateTime) {
    this.updateTime = updateTime;
    return this;
  }

  public String getUpdateTime() {
    return updateTime;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    FederationPolicy that = (FederationPolicy) o;
    return Objects.equals(comment, that.comment)
        && Objects.equals(createTime, that.createTime)
        && Objects.equals(id, that.id)
        && Objects.equals(name, that.name)
        && Objects.equals(oidcPolicy, that.oidcPolicy)
        && Objects.equals(updateTime, that.updateTime);
  }

  @Override
  public int hashCode() {
    return Objects.hash(comment, createTime, id, name, oidcPolicy, updateTime);
  }

  @Override
  public String toString() {
    return new ToStringer(FederationPolicy.class)
        .add("comment", comment)
        .add("createTime", createTime)
        .add("id", id)
        .add("name", name)
        .add("oidcPolicy", oidcPolicy)
        .add("updateTime", updateTime)
        .toString();
  }
}
// ==== databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetFederationPolicyRequest.java ====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.sharing;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Get recipient federation policy */
@Generated
public class GetFederationPolicyRequest {
  /** Name of the policy. This is the name of the policy to be retrieved. */
  @JsonIgnore private String name;

  /**
   * Name of the recipient. This is the name of the recipient for which the policy is being
   * retrieved.
   */
  @JsonIgnore private String recipientName;

  public GetFederationPolicyRequest setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  public GetFederationPolicyRequest setRecipientName(String recipientName) {
    this.recipientName = recipientName;
    return this;
  }

  public String getRecipientName() {
    return recipientName;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    GetFederationPolicyRequest that = (GetFederationPolicyRequest) o;
    return Objects.equals(name, that.name) && Objects.equals(recipientName, that.recipientName);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name, recipientName);
  }

  @Override
  public String toString() {
    return new ToStringer(GetFederationPolicyRequest.class)
        .add("name", name)
        .add("recipientName", recipientName)
        .toString();
  }
}

// ==== databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesRequest.java ====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.sharing;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** List recipient federation policies */
@Generated
public class ListFederationPoliciesRequest {
  /** Maximum number of policies to return per page (query parameter `max_results`). */
  @JsonIgnore
  @QueryParam("max_results")
  private Long maxResults;

  /** Opaque pagination token from a previous response (query parameter `page_token`). */
  @JsonIgnore
  @QueryParam("page_token")
  private String pageToken;

  /**
   * Name of the recipient. This is the name of the recipient for which the policies are being
   * listed.
   */
  @JsonIgnore private String recipientName;

  public ListFederationPoliciesRequest setMaxResults(Long maxResults) {
    this.maxResults = maxResults;
    return this;
  }

  public Long getMaxResults() {
    return maxResults;
  }

  public ListFederationPoliciesRequest setPageToken(String pageToken) {
    this.pageToken = pageToken;
    return this;
  }

  public String getPageToken() {
    return pageToken;
  }

  public ListFederationPoliciesRequest setRecipientName(String recipientName) {
    this.recipientName = recipientName;
    return this;
  }

  public String getRecipientName() {
    return recipientName;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ListFederationPoliciesRequest that = (ListFederationPoliciesRequest) o;
    return Objects.equals(maxResults, that.maxResults)
        && Objects.equals(pageToken, that.pageToken)
        && Objects.equals(recipientName, that.recipientName);
  }

  @Override
  public int hashCode() {
    return Objects.hash(maxResults, pageToken, recipientName);
  }

  @Override
  public String toString() {
    return new ToStringer(ListFederationPoliciesRequest.class)
        .add("maxResults", maxResults)
        .add("pageToken", pageToken)
        .add("recipientName", recipientName)
        .toString();
  }
}
// ==== databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListFederationPoliciesResponse.java ====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.sharing;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

@Generated
public class ListFederationPoliciesResponse {
  /** Opaque token to request the next page of results; absent when there are no more pages. */
  @JsonProperty("next_page_token")
  private String nextPageToken;

  // Element type restored: SOURCE showed the raw type `Collection` (generic parameter stripped
  // during extraction); the list endpoint returns FederationPolicy items.
  /** The federation policies for the requested recipient. */
  @JsonProperty("policies")
  private Collection<FederationPolicy> policies;

  public ListFederationPoliciesResponse setNextPageToken(String nextPageToken) {
    this.nextPageToken = nextPageToken;
    return this;
  }

  public String getNextPageToken() {
    return nextPageToken;
  }

  public ListFederationPoliciesResponse setPolicies(Collection<FederationPolicy> policies) {
    this.policies = policies;
    return this;
  }

  public Collection<FederationPolicy> getPolicies() {
    return policies;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ListFederationPoliciesResponse that = (ListFederationPoliciesResponse) o;
    return Objects.equals(nextPageToken, that.nextPageToken)
        && Objects.equals(policies, that.policies);
  }

  @Override
  public int hashCode() {
    return Objects.hash(nextPageToken, policies);
  }

  @Override
  public String toString() {
    return new ToStringer(ListFederationPoliciesResponse.class)
        .add("nextPageToken", nextPageToken)
        .add("policies", policies)
        .toString();
  }
}
// ==== databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/OidcFederationPolicy.java ====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.sharing;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * Specifies the policy to use for validating OIDC claims in your federated tokens from Delta
 * Sharing Clients. Refer to https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed
 * for more details.
 */
@Generated
public class OidcFederationPolicy {
  // Element type restored: SOURCE showed the raw type `Collection` (generic parameter stripped
  // during extraction); audiences are plain strings from the token's 'aud' claim.
  /**
   * The allowed token audiences, as specified in the 'aud' claim of federated tokens. The audience
   * identifier is intended to represent the recipient of the token. Can be any non-empty string
   * value. As long as the audience in the token matches at least one audience in the policy, the
   * token is considered a match.
   */
  @JsonProperty("audiences")
  private Collection<String> audiences;

  /** The required token issuer, as specified in the 'iss' claim of federated tokens. */
  @JsonProperty("issuer")
  private String issuer;

  /**
   * The required token subject, as specified in the subject claim of federated tokens. The subject
   * claim identifies the identity of the user or machine accessing the resource. Examples for Entra
   * ID (AAD): - U2M flow (group access): If the subject claim is `groups`, this must be the Object
   * ID of the group in Entra ID. - U2M flow (user access): If the subject claim is `oid`, this must
   * be the Object ID of the user in Entra ID. - M2M flow (OAuth App access): If the subject claim
   * is `azp`, this must be the client ID of the OAuth app registered in Entra ID.
   */
  @JsonProperty("subject")
  private String subject;

  /**
   * The claim that contains the subject of the token. Depending on the identity provider and the
   * use case (U2M or M2M), this can vary: - For Entra ID (AAD): * U2M flow (group access): Use
   * `groups`. * U2M flow (user access): Use `oid`. * M2M flow (OAuth App access): Use `azp`. - For
   * other IdPs, refer to the specific IdP documentation.
   *
   * <p>Supported `subject_claim` values are: - `oid`: Object ID of the user. - `azp`: Client ID of
   * the OAuth app. - `groups`: Object ID of the group. - `sub`: Subject identifier for other use
   * cases.
   */
  @JsonProperty("subject_claim")
  private String subjectClaim;

  public OidcFederationPolicy setAudiences(Collection<String> audiences) {
    this.audiences = audiences;
    return this;
  }

  public Collection<String> getAudiences() {
    return audiences;
  }

  public OidcFederationPolicy setIssuer(String issuer) {
    this.issuer = issuer;
    return this;
  }

  public String getIssuer() {
    return issuer;
  }

  public OidcFederationPolicy setSubject(String subject) {
    this.subject = subject;
    return this;
  }

  public String getSubject() {
    return subject;
  }

  public OidcFederationPolicy setSubjectClaim(String subjectClaim) {
    this.subjectClaim = subjectClaim;
    return this;
  }

  public String getSubjectClaim() {
    return subjectClaim;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    OidcFederationPolicy that = (OidcFederationPolicy) o;
    return Objects.equals(audiences, that.audiences)
        && Objects.equals(issuer, that.issuer)
        && Objects.equals(subject, that.subject)
        && Objects.equals(subjectClaim, that.subjectClaim);
  }

  @Override
  public int hashCode() {
    return Objects.hash(audiences, issuer, subject, subjectClaim);
  }

  @Override
  public String toString() {
    return new ToStringer(OidcFederationPolicy.class)
        .add("audiences", audiences)
        .add("issuer", issuer)
        .add("subject", subject)
        .add("subjectClaim", subjectClaim)
        .toString();
  }
}
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesAPI.java @@ -0,0 +1,157 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The Recipient Federation Policies APIs are only applicable in the open sharing model where the + * recipient object has the authentication type of `OIDC_RECIPIENT`, enabling data sharing from + * Databricks to non-Databricks recipients. OIDC Token Federation enables secure, secret-less + * authentication for accessing Delta Sharing servers. Users and applications authenticate using + * short-lived OIDC tokens issued by their own Identity Provider (IdP), such as Azure Entra ID or + * Okta, without the need for managing static credentials or client secrets. A federation policy + * defines how non-Databricks recipients authenticate using OIDC tokens. It validates the OIDC + * claims in federated tokens and is set at the recipient level. The caller must be the owner of the + * recipient to create or manage a federation policy. Federation policies support the following + * scenarios: - User-to-Machine (U2M) flow: A user accesses Delta Shares using their own identity, + * such as connecting through PowerBI Delta Sharing Client. - Machine-to-Machine (M2M) flow: An + * application accesses Delta Shares using its own identity, typically for automation tasks like + * nightly jobs through Python Delta Sharing Client. OIDC Token Federation enables fine-grained + * access control, supports Multi-Factor Authentication (MFA), and enhances security by minimizing + * the risk of credential leakage through the use of short-lived, expiring tokens. 
It is designed + * for strong identity governance, secure cross-platform data sharing, and reduced operational + * overhead for credential management. + * + *

For more information, see + * https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security + * and https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed + */ +@Generated +public class RecipientFederationPoliciesAPI { + private static final Logger LOG = LoggerFactory.getLogger(RecipientFederationPoliciesAPI.class); + + private final RecipientFederationPoliciesService impl; + + /** Regular-use constructor */ + public RecipientFederationPoliciesAPI(ApiClient apiClient) { + impl = new RecipientFederationPoliciesImpl(apiClient); + } + + /** Constructor for mocks */ + public RecipientFederationPoliciesAPI(RecipientFederationPoliciesService mock) { + impl = mock; + } + + public FederationPolicy create(String recipientName, FederationPolicy policy) { + return create( + new CreateFederationPolicyRequest().setRecipientName(recipientName).setPolicy(policy)); + } + + /** + * Create recipient federation policy. + * + *

Create a federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks + * to non-Databricks recipients. The caller must be the owner of the recipient. When sharing data + * from Databricks to non-Databricks clients, you can define a federation policy to authenticate + * non-Databricks recipients. The federation policy validates OIDC claims in federated tokens and + * is defined at the recipient level. This enables secretless sharing clients to authenticate + * using OIDC tokens. + * + *

Supported scenarios for federation policies: 1. **User-to-Machine (U2M) flow** (e.g., + * PowerBI): A user accesses a resource using their own identity. 2. **Machine-to-Machine (M2M) + * flow** (e.g., OAuth App): An OAuth App accesses a resource using its own identity, typically + * for tasks like running nightly jobs. + * + *

For an overview, refer to: - Blog post: Overview of feature: + * https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security + * + *

For detailed configuration guides based on your use case: - Creating a Federation Policy as + * a provider: https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed - + * Configuration and usage for Machine-to-Machine (M2M) applications (e.g., Python Delta Sharing + * Client): https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-m2m - Configuration + * and usage for User-to-Machine (U2M) applications (e.g., PowerBI): + * https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-u2m + */ + public FederationPolicy create(CreateFederationPolicyRequest request) { + return impl.create(request); + } + + public void delete(String recipientName, String name) { + delete(new DeleteFederationPolicyRequest().setRecipientName(recipientName).setName(name)); + } + + /** + * Delete recipient federation policy. + * + *

Deletes an existing federation policy for an OIDC_FEDERATION recipient. The caller must be + * the owner of the recipient. + */ + public void delete(DeleteFederationPolicyRequest request) { + impl.delete(request); + } + + public FederationPolicy getFederationPolicy(String recipientName, String name) { + return getFederationPolicy( + new GetFederationPolicyRequest().setRecipientName(recipientName).setName(name)); + } + + /** + * Get recipient federation policy. + * + *

Reads an existing federation policy for an OIDC_FEDERATION recipient for sharing data from + * Databricks to non-Databricks recipients. The caller must have read access to the recipient. + */ + public FederationPolicy getFederationPolicy(GetFederationPolicyRequest request) { + return impl.getFederationPolicy(request); + } + + public Iterable list(String recipientName) { + return list(new ListFederationPoliciesRequest().setRecipientName(recipientName)); + } + + /** + * List recipient federation policies. + * + *

Lists federation policies for an OIDC_FEDERATION recipient for sharing data from Databricks + * to non-Databricks recipients. The caller must have read access to the recipient. + */ + public Iterable list(ListFederationPoliciesRequest request) { + return new Paginator<>( + request, + impl::list, + ListFederationPoliciesResponse::getPolicies, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public FederationPolicy update(String recipientName, String name, FederationPolicy policy) { + return update( + new UpdateFederationPolicyRequest() + .setRecipientName(recipientName) + .setName(name) + .setPolicy(policy)); + } + + /** + * Update recipient federation policy. + * + *

Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of + * the recipient. + */ + public FederationPolicy update(UpdateFederationPolicyRequest request) { + return impl.update(request); + } + + public RecipientFederationPoliciesService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java new file mode 100755 index 000000000..03b5136cf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java @@ -0,0 +1,98 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of RecipientFederationPolicies */ +@Generated +class RecipientFederationPoliciesImpl implements RecipientFederationPoliciesService { + private final ApiClient apiClient; + + public RecipientFederationPoliciesImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public FederationPolicy create(CreateFederationPolicyRequest request) { + String path = + String.format( + "/api/2.0/data-sharing/recipients/%s/federation-policies", request.getRecipientName()); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getPolicy())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, FederationPolicy.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public 
void delete(DeleteFederationPolicyRequest request) { + String path = + String.format( + "/api/2.0/data-sharing/recipients/%s/federation-policies/%s", + request.getRecipientName(), request.getName()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public FederationPolicy getFederationPolicy(GetFederationPolicyRequest request) { + String path = + String.format( + "/api/2.0/data-sharing/recipients/%s/federation-policies/%s", + request.getRecipientName(), request.getName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, FederationPolicy.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListFederationPoliciesResponse list(ListFederationPoliciesRequest request) { + String path = + String.format( + "/api/2.0/data-sharing/recipients/%s/federation-policies", request.getRecipientName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListFederationPoliciesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public FederationPolicy update(UpdateFederationPolicyRequest request) { + String path = + String.format( + "/api/2.0/data-sharing/recipients/%s/federation-policies/%s", + request.getRecipientName(), request.getName()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicy())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + 
req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, FederationPolicy.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesService.java new file mode 100755 index 000000000..ccf79726a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesService.java @@ -0,0 +1,93 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; + +/** + * The Recipient Federation Policies APIs are only applicable in the open sharing model where the + * recipient object has the authentication type of `OIDC_RECIPIENT`, enabling data sharing from + * Databricks to non-Databricks recipients. OIDC Token Federation enables secure, secret-less + * authentication for accessing Delta Sharing servers. Users and applications authenticate using + * short-lived OIDC tokens issued by their own Identity Provider (IdP), such as Azure Entra ID or + * Okta, without the need for managing static credentials or client secrets. A federation policy + * defines how non-Databricks recipients authenticate using OIDC tokens. It validates the OIDC + * claims in federated tokens and is set at the recipient level. The caller must be the owner of the + * recipient to create or manage a federation policy. Federation policies support the following + * scenarios: - User-to-Machine (U2M) flow: A user accesses Delta Shares using their own identity, + * such as connecting through PowerBI Delta Sharing Client. 
- Machine-to-Machine (M2M) flow: An + * application accesses Delta Shares using its own identity, typically for automation tasks like + * nightly jobs through Python Delta Sharing Client. OIDC Token Federation enables fine-grained + * access control, supports Multi-Factor Authentication (MFA), and enhances security by minimizing + * the risk of credential leakage through the use of short-lived, expiring tokens. It is designed + * for strong identity governance, secure cross-platform data sharing, and reduced operational + * overhead for credential management. + * + *

For more information, see + * https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security + * and https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface RecipientFederationPoliciesService { + /** + * Create recipient federation policy. + * + *

Create a federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks + * to non-Databricks recipients. The caller must be the owner of the recipient. When sharing data + * from Databricks to non-Databricks clients, you can define a federation policy to authenticate + * non-Databricks recipients. The federation policy validates OIDC claims in federated tokens and + * is defined at the recipient level. This enables secretless sharing clients to authenticate + * using OIDC tokens. + * + *

Supported scenarios for federation policies: 1. **User-to-Machine (U2M) flow** (e.g., + * PowerBI): A user accesses a resource using their own identity. 2. **Machine-to-Machine (M2M) + * flow** (e.g., OAuth App): An OAuth App accesses a resource using its own identity, typically + * for tasks like running nightly jobs. + * + *

For an overview, refer to: - Blog post: Overview of feature: + * https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security + * + *

For detailed configuration guides based on your use case: - Creating a Federation Policy as + * a provider: https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed - + * Configuration and usage for Machine-to-Machine (M2M) applications (e.g., Python Delta Sharing + * Client): https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-m2m - Configuration + * and usage for User-to-Machine (U2M) applications (e.g., PowerBI): + * https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-u2m + */ + FederationPolicy create(CreateFederationPolicyRequest createFederationPolicyRequest); + + /** + * Delete recipient federation policy. + * + *

Deletes an existing federation policy for an OIDC_FEDERATION recipient. The caller must be + * the owner of the recipient. + */ + void delete(DeleteFederationPolicyRequest deleteFederationPolicyRequest); + + /** + * Get recipient federation policy. + * + *

Reads an existing federation policy for an OIDC_FEDERATION recipient for sharing data from + * Databricks to non-Databricks recipients. The caller must have read access to the recipient. + */ + FederationPolicy getFederationPolicy(GetFederationPolicyRequest getFederationPolicyRequest); + + /** + * List recipient federation policies. + * + *

Lists federation policies for an OIDC_FEDERATION recipient for sharing data from Databricks + * to non-Databricks recipients. The caller must have read access to the recipient. + */ + ListFederationPoliciesResponse list(ListFederationPoliciesRequest listFederationPoliciesRequest); + + /** + * Update recipient federation policy. + * + *

Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of + * the recipient. + */ + FederationPolicy update(UpdateFederationPolicyRequest updateFederationPolicyRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java index c0d4e744e..119e8a050 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java @@ -22,6 +22,10 @@ public class Table { @JsonProperty("internal_attributes") private TableInternalAttributes internalAttributes; + /** The catalog and schema of the materialized table */ + @JsonProperty("materialization_namespace") + private String materializationNamespace; + /** The name of a materialized table. */ @JsonProperty("materialized_table_name") private String materializedTableName; @@ -73,6 +77,15 @@ public TableInternalAttributes getInternalAttributes() { return internalAttributes; } + public Table setMaterializationNamespace(String materializationNamespace) { + this.materializationNamespace = materializationNamespace; + return this; + } + + public String getMaterializationNamespace() { + return materializationNamespace; + } + public Table setMaterializedTableName(String materializedTableName) { this.materializedTableName = materializedTableName; return this; @@ -135,6 +148,7 @@ public boolean equals(Object o) { return Objects.equals(comment, that.comment) && Objects.equals(id, that.id) && Objects.equals(internalAttributes, that.internalAttributes) + && Objects.equals(materializationNamespace, that.materializationNamespace) && Objects.equals(materializedTableName, that.materializedTableName) && Objects.equals(name, that.name) && Objects.equals(schema, that.schema) @@ -146,7 +160,16 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - 
comment, id, internalAttributes, materializedTableName, name, schema, share, shareId, tags); + comment, + id, + internalAttributes, + materializationNamespace, + materializedTableName, + name, + schema, + share, + shareId, + tags); } @Override @@ -155,6 +178,7 @@ public String toString() { .add("comment", comment) .add("id", id) .add("internalAttributes", internalAttributes) + .add("materializationNamespace", materializationNamespace) .add("materializedTableName", materializedTableName) .add("name", name) .add("schema", schema) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java new file mode 100755 index 000000000..d318fe872 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update recipient federation policy */ +@Generated +public class UpdateFederationPolicyRequest { + /** Name of the policy. This is the name of the current name of the policy. */ + @JsonIgnore private String name; + + /** */ + @JsonProperty("policy") + private FederationPolicy policy; + + /** + * Name of the recipient. This is the name of the recipient for which the policy is being updated. + */ + @JsonIgnore private String recipientName; + + /** + * The field mask specifies which fields of the policy to update. To specify multiple fields in + * the field mask, use comma as the separator (no space). 
The special value '*' indicates that all + * fields should be updated (full replacement). If unspecified, all fields that are set in the + * policy provided in the update request will overwrite the corresponding fields in the existing + * policy. Example value: 'comment,oidc_policy.audiences'. + */ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateFederationPolicyRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateFederationPolicyRequest setPolicy(FederationPolicy policy) { + this.policy = policy; + return this; + } + + public FederationPolicy getPolicy() { + return policy; + } + + public UpdateFederationPolicyRequest setRecipientName(String recipientName) { + this.recipientName = recipientName; + return this; + } + + public String getRecipientName() { + return recipientName; + } + + public UpdateFederationPolicyRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateFederationPolicyRequest that = (UpdateFederationPolicyRequest) o; + return Objects.equals(name, that.name) + && Objects.equals(policy, that.policy) + && Objects.equals(recipientName, that.recipientName) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(name, policy, recipientName, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateFederationPolicyRequest.class) + .add("name", name) + .add("policy", policy) + .add("recipientName", recipientName) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java index 823f9e6b3..26288362d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java @@ -18,6 +18,10 @@ public class UpdateSharePermissions { /** The name of the share. */ @JsonIgnore private String name; + /** Optional. Whether to return the latest permissions list of the share in the response. */ + @JsonProperty("omit_permissions_list") + private Boolean omitPermissionsList; + public UpdateSharePermissions setChanges(Collection changes) { this.changes = changes; return this; @@ -36,17 +40,28 @@ public String getName() { return name; } + public UpdateSharePermissions setOmitPermissionsList(Boolean omitPermissionsList) { + this.omitPermissionsList = omitPermissionsList; + return this; + } + + public Boolean getOmitPermissionsList() { + return omitPermissionsList; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateSharePermissions that = (UpdateSharePermissions) o; - return Objects.equals(changes, that.changes) && Objects.equals(name, that.name); + return Objects.equals(changes, that.changes) + && Objects.equals(name, that.name) + && Objects.equals(omitPermissionsList, that.omitPermissionsList); } @Override public int hashCode() { - return Objects.hash(changes, name); + return Objects.hash(changes, name, omitPermissionsList); } @Override @@ -54,6 +69,7 @@ public String toString() { return new ToStringer(UpdateSharePermissions.class) .add("changes", changes) .add("name", name) + .add("omitPermissionsList", omitPermissionsList) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java index 3725daf02..a0ed02ec2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java @@ -14,6 +14,13 @@ public class UpdateAlertRequest { @JsonProperty("alert") private UpdateAlertRequestAlert alert; + /** + * If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the + * alert's display name conflicts with an existing alert's display name. + */ + @JsonProperty("auto_resolve_display_name") + private Boolean autoResolveDisplayName; + /** */ @JsonIgnore private String id; @@ -40,6 +47,15 @@ public UpdateAlertRequestAlert getAlert() { return alert; } + public UpdateAlertRequest setAutoResolveDisplayName(Boolean autoResolveDisplayName) { + this.autoResolveDisplayName = autoResolveDisplayName; + return this; + } + + public Boolean getAutoResolveDisplayName() { + return autoResolveDisplayName; + } + public UpdateAlertRequest setId(String id) { this.id = id; return this; @@ -64,19 +80,21 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; UpdateAlertRequest that = (UpdateAlertRequest) o; return Objects.equals(alert, that.alert) + && Objects.equals(autoResolveDisplayName, that.autoResolveDisplayName) && Objects.equals(id, that.id) && Objects.equals(updateMask, that.updateMask); } @Override public int hashCode() { - return Objects.hash(alert, id, updateMask); + return Objects.hash(alert, autoResolveDisplayName, id, updateMask); } @Override public String toString() { return new ToStringer(UpdateAlertRequest.class) .add("alert", alert) + .add("autoResolveDisplayName", autoResolveDisplayName) .add("id", id) .add("updateMask", updateMask) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java index 3edc04649..46be75273 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java @@ -10,6 +10,13 @@ @Generated public class UpdateQueryRequest { + /** + * If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the + * alert's display name conflicts with an existing alert's display name. + */ + @JsonProperty("auto_resolve_display_name") + private Boolean autoResolveDisplayName; + /** */ @JsonIgnore private String id; @@ -31,6 +38,15 @@ public class UpdateQueryRequest { @JsonProperty("update_mask") private String updateMask; + public UpdateQueryRequest setAutoResolveDisplayName(Boolean autoResolveDisplayName) { + this.autoResolveDisplayName = autoResolveDisplayName; + return this; + } + + public Boolean getAutoResolveDisplayName() { + return autoResolveDisplayName; + } + public UpdateQueryRequest setId(String id) { this.id = id; return this; @@ -63,19 +79,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateQueryRequest that = (UpdateQueryRequest) o; - return Objects.equals(id, that.id) + return Objects.equals(autoResolveDisplayName, that.autoResolveDisplayName) + && Objects.equals(id, that.id) && Objects.equals(query, that.query) && Objects.equals(updateMask, that.updateMask); } @Override public int hashCode() { - return Objects.hash(id, query, updateMask); + return Objects.hash(autoResolveDisplayName, id, query, updateMask); } @Override public String toString() { return new ToStringer(UpdateQueryRequest.class) + .add("autoResolveDisplayName", autoResolveDisplayName) .add("id", id) .add("query", query) .add("updateMask", updateMask)