diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 2924d5d6d..26ece1bc5 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -31b3fea21dbe5a3a652937691602eb66d6dba30b \ No newline at end of file +05692f4dcf168be190bb7bcda725ee8b368b7ae3 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 49878353a..137cc27b5 100755 --- a/.gitattributes +++ b/.gitattributes @@ -787,6 +787,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateRespo databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java linguist-generated=true @@ -812,6 +814,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExe databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java linguist-generated=true @@ -829,6 +833,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashb databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java linguist-generated=true @@ -1039,6 +1045,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssign databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermission.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermissions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AuthenticationMethod.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRuns.java linguist-generated=true @@ -1060,6 +1067,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.jav databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshot.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJob.java linguist-generated=true @@ -1136,6 +1146,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTask.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiModel.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PythonWheelTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetailsCodeCode.java linguist-generated=true @@ -1190,9 +1203,12 @@ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskDashboa databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFile.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskQuery.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskSubscription.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/StorageMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Subscription.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionSubscriber.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskDependency.java linguist-generated=true @@ -1211,6 +1227,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewType.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewsToExport.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Webhook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetail.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java linguist-generated=true @@ -1382,6 +1399,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java linguist-generated=true @@ -1444,6 +1464,10 @@ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExper databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponse.java linguist-generated=true @@ -1494,6 +1518,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponse databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Model.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricks.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java linguist-generated=true @@ -2008,9 +2033,21 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetw databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebook.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsService.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboard.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloading.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoring.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringImpl.java linguist-generated=true @@ -2134,6 +2171,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefa databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableExportNotebookRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableNotebookTableClipboardRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableResultsDownloadingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 17746c816..0ec576df1 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -11,3 +11,29 @@ ### Internal Changes ### API Changes +* Added `workspaceClient.enableExportNotebook()` service, `workspaceClient.enableNotebookTableClipboard()` service and `workspaceClient.enableResultsDownloading()` service. +* Added `getCredentialsForTraceDataDownload()` and `getCredentialsForTraceDataUpload()` methods for `workspaceClient.experiments()` service. +* Added `getDownloadFullQueryResult()` method for `workspaceClient.genie()` service. +* Added `getPublishedDashboardTokenInfo()` method for `workspaceClient.lakeviewEmbedded()` service. +* Added `bindingWorkspaceIds` field for `com.databricks.sdk.service.billing.BudgetPolicy`. 
+* Added `downloadId` field for `com.databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultResponse`. +* Added `dashboardOutput` field for `com.databricks.sdk.service.jobs.RunOutput`. +* Added `dashboardTask` and `powerBiTask` fields for `com.databricks.sdk.service.jobs.RunTask`. +* Added `dashboardTask` and `powerBiTask` fields for `com.databricks.sdk.service.jobs.SubmitTask`. +* Added `dashboardTask` and `powerBiTask` fields for `com.databricks.sdk.service.jobs.Task`. +* Added `includeFeatures` field for `com.databricks.sdk.service.ml.CreateForecastingExperimentRequest`. +* Added `models` field for `com.databricks.sdk.service.ml.LogInputs`. +* Added `datasetDigest`, `datasetName` and `modelId` fields for `com.databricks.sdk.service.ml.LogMetric`. +* Added `datasetDigest`, `datasetName`, `modelId` and `runId` fields for `com.databricks.sdk.service.ml.Metric`. +* Added `modelInputs` field for `com.databricks.sdk.service.ml.RunInputs`. +* Added `clientApplication` field for `com.databricks.sdk.service.sql.QueryInfo`. +* Added `GEOGRAPHY` and `GEOMETRY` enum values for `com.databricks.sdk.service.catalog.ColumnTypeName`. +* Added `ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS`, `DOCKER_CONTAINER_CREATION_EXCEPTION`, `DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION` and `DOCKER_INVALID_OS_EXCEPTION` enum values for `com.databricks.sdk.service.compute.TerminationReasonCode`. +* Added `STANDARD` enum value for `com.databricks.sdk.service.jobs.PerformanceTarget`. +* Added `CAN_VIEW` enum value for `com.databricks.sdk.service.sql.WarehousePermissionLevel`. +* [Breaking] Changed `generateDownloadFullQueryResult()` method for `workspaceClient.genie()` service. Method path has changed. +* [Breaking] Changed waiter for `workspaceClient.commandExecution().create()` method. +* [Breaking] Changed waiter for `workspaceClient.commandExecution().execute()` method. +* [Breaking] Removed `error`, `status` and `transientStatementId` fields for `com.databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultResponse`. +* [Breaking] Removed `BALANCED` and `COST_OPTIMIZED` enum values for `com.databricks.sdk.service.jobs.PerformanceTarget`. +* [Breaking] Removed `workspaceClient.pipelines().waitGetPipelineRunning()` method. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java index aa6143b63..66355b115 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java @@ -11,6 +11,13 @@ /** Contains the BudgetPolicy details. */ @Generated public class BudgetPolicy { + /** + * List of workspaces that this budget policy will be exclusively bound to. An empty binding + * implies that this budget policy is open to any workspace in the account. + */ + @JsonProperty("binding_workspace_ids") + private Collection bindingWorkspaceIds; + + /** A list of tags defined by the customer. At most 20 entries are allowed per policy.
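As an aside on usage: the new `binding_workspace_ids` field follows the same fluent setter pattern as the rest of the generated model classes. A minimal sketch, assuming workspace IDs are `Long` values (the element type is erased in the excerpt above) and using placeholder names throughout:

```java
import com.databricks.sdk.service.billing.BudgetPolicy;
import java.util.Arrays;

public class BudgetPolicyBindingExample {
  public static void main(String[] args) {
    // Bind the policy to two workspaces; an empty list would leave it
    // open to any workspace in the account, per the field's javadoc.
    BudgetPolicy policy =
        new BudgetPolicy()
            .setPolicyName("team-ml-budget") // placeholder name
            .setBindingWorkspaceIds(Arrays.asList(1234567890L, 2345678901L));
    System.out.println(policy);
  }
}
```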
*/ @JsonProperty("custom_tags") private Collection customTags; @@ -27,6 +34,15 @@ public class BudgetPolicy { @JsonProperty("policy_name") private String policyName; + public BudgetPolicy setBindingWorkspaceIds(Collection bindingWorkspaceIds) { + this.bindingWorkspaceIds = bindingWorkspaceIds; + return this; + } + + public Collection getBindingWorkspaceIds() { + return bindingWorkspaceIds; + } + public BudgetPolicy setCustomTags( Collection customTags) { this.customTags = customTags; @@ -60,19 +76,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; BudgetPolicy that = (BudgetPolicy) o; - return Objects.equals(customTags, that.customTags) + return Objects.equals(bindingWorkspaceIds, that.bindingWorkspaceIds) + && Objects.equals(customTags, that.customTags) && Objects.equals(policyId, that.policyId) && Objects.equals(policyName, that.policyName); } @Override public int hashCode() { - return Objects.hash(customTags, policyId, policyName); + return Objects.hash(bindingWorkspaceIds, customTags, policyId, policyName); } @Override public String toString() { return new ToStringer(BudgetPolicy.class) + .add("bindingWorkspaceIds", bindingWorkspaceIds) .add("customTags", customTags) .add("policyId", policyId) .add("policyName", policyName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java index 54d72d2f0..0d84fbe14 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java @@ -15,6 +15,8 @@ public enum ColumnTypeName { DECIMAL, DOUBLE, FLOAT, + GEOGRAPHY, + GEOMETRY, INT, INTERVAL, LONG, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java index 16f0ebbc6..bbe39faf4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java @@ -29,7 +29,13 @@ public class CreateVolumeRequestContent { @JsonProperty("storage_location") private String storageLocation; - /** */ + /** + * The type of the volume. An external volume is located in the specified external location. A + * managed volume is located in the default location which is specified by the parent schema, or + * the parent catalog, or the Metastore. [Learn more] + * + *

[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external + */ @JsonProperty("volume_type") private VolumeType volumeType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java index d28cf4e10..ea18910d4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java @@ -76,7 +76,13 @@ public class VolumeInfo { @JsonProperty("volume_id") private String volumeId; - /** */ + /** + * The type of the volume. An external volume is located in the specified external location. A + * managed volume is located in the default location which is specified by the parent schema, or + * the parent catalog, or the Metastore. [Learn more] + * + *

[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external + */ @JsonProperty("volume_type") private VolumeType volumeType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java index 044f72a39..fcb9f83ac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java @@ -4,6 +4,13 @@ import com.databricks.sdk.support.Generated; +/** + * The type of the volume. An external volume is located in the specified external location. A + * managed volume is located in the default location which is specified by the parent schema, or the + * parent catalog, or the Metastore. [Learn more] + * + *

[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external + */ @Generated public enum VolumeType { EXTERNAL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java index 74e6b36fb..a02e39cb4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java @@ -33,6 +33,13 @@ public class Environment { @JsonProperty("dependencies") private Collection dependencies; + /** + * List of jar dependencies; each entry should be a string representing a volume path. For + * example: `/Volumes/path/to/test.jar`. + */ + @JsonProperty("jar_dependencies") + private Collection jarDependencies; + public Environment setClient(String client) { this.client = client; return this; } @@ -51,17 +58,28 @@ public Collection getDependencies() { return dependencies; } + public Environment setJarDependencies(Collection jarDependencies) { + this.jarDependencies = jarDependencies; + return this; + } + + public Collection getJarDependencies() { + return jarDependencies; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Environment that = (Environment) o; - return Objects.equals(client, that.client) && Objects.equals(dependencies, that.dependencies); + return Objects.equals(client, that.client) + && Objects.equals(dependencies, that.dependencies) + && Objects.equals(jarDependencies, that.jarDependencies); } @Override public int hashCode() { - return Objects.hash(client, dependencies); + return Objects.hash(client, dependencies, jarDependencies); } @Override @@ -69,6 +87,7 @@ public String toString() { return new ToStringer(Environment.class) .add("client", client) .add("dependencies", dependencies) + .add("jarDependencies", jarDependencies) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java index bdd74cd8c..173d3492b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java @@ -11,6 +11,7 @@ public enum TerminationReasonCode { ACCESS_TOKEN_FAILURE, ALLOCATION_TIMEOUT, ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY, + ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS, ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS, ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS, ALLOCATION_TIMEOUT_NO_READY_CLUSTERS, @@ -63,7 +64,10 @@ public enum TerminationReasonCode { DATA_ACCESS_CONFIG_CHANGED, DBFS_COMPONENT_UNHEALTHY, DISASTER_RECOVERY_REPLICATION, + DOCKER_CONTAINER_CREATION_EXCEPTION, DOCKER_IMAGE_PULL_FAILURE, + DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION, + DOCKER_INVALID_OS_EXCEPTION, DRIVER_EVICTION, DRIVER_LAUNCH_TIMEOUT, DRIVER_NODE_UNREACHABLE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java new file mode 100755 index 000000000..a3d94b322 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java @@ -0,0 +1,99 @@ +// Code generated from OpenAPI specs by Databricks
SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class AuthorizationDetails { + /** + * Represents downscoped permission rules with specific access rights. This field is specific to + * `workspace_rule_set` constraint. + */ + @JsonProperty("grant_rules") + private Collection grantRules; + + /** The acl path of the tree store resource. */ + @JsonProperty("resource_legacy_acl_path") + private String resourceLegacyAclPath; + + /** + * The resource name to which the authorization rule applies. This field is specific to + * `workspace_rule_set` constraint. Format: `workspaces/{workspace_id}/dashboards/{dashboard_id}` + */ + @JsonProperty("resource_name") + private String resourceName; + + /** + * The type of authorization downscoping policy. Ex: `workspace_rule_set` defines access rules for + * a specific workspace resource. + */ + @JsonProperty("type") + private String typeValue; + + public AuthorizationDetails setGrantRules(Collection grantRules) { + this.grantRules = grantRules; + return this; + } + + public Collection getGrantRules() { + return grantRules; + } + + public AuthorizationDetails setResourceLegacyAclPath(String resourceLegacyAclPath) { + this.resourceLegacyAclPath = resourceLegacyAclPath; + return this; + } + + public String getResourceLegacyAclPath() { + return resourceLegacyAclPath; + } + + public AuthorizationDetails setResourceName(String resourceName) { + this.resourceName = resourceName; + return this; + } + + public String getResourceName() { + return resourceName; + } + + public AuthorizationDetails setType(String typeValue) { + this.typeValue = typeValue; + return this; + } + + public String getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AuthorizationDetails that = (AuthorizationDetails) o; + return Objects.equals(grantRules, that.grantRules) + && Objects.equals(resourceLegacyAclPath, that.resourceLegacyAclPath) + && Objects.equals(resourceName, that.resourceName) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(grantRules, resourceLegacyAclPath, resourceName, typeValue); + } + + @Override + public String toString() { + return new ToStringer(AuthorizationDetails.class) + .add("grantRules", grantRules) + .add("resourceLegacyAclPath", resourceLegacyAclPath) + .add("resourceName", resourceName) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java new file mode 100755 index 000000000..ab39e1f8b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java @@ -0,0 +1,48 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
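The two classes above are plain data holders with fluent setters, so a `workspace_rule_set` payload can also be assembled by hand when testing. A hedged sketch with made-up IDs (real values come back from the token-info endpoint introduced later in this diff):

```java
import com.databricks.sdk.service.dashboards.AuthorizationDetails;
import com.databricks.sdk.service.dashboards.AuthorizationDetailsGrantRule;
import java.util.Collections;

public class AuthorizationDetailsExample {
  public static void main(String[] args) {
    // Illustrative values only; the service normally produces this object.
    AuthorizationDetails details =
        new AuthorizationDetails()
            .setType("workspace_rule_set")
            .setResourceName("workspaces/1234567890/dashboards/abcd1234") // placeholder
            .setGrantRules(
                Collections.singletonList(
                    new AuthorizationDetailsGrantRule()
                        .setPermissionSet("permissionSets/dashboard.runner")));
    System.out.println(details);
  }
}
```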
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AuthorizationDetailsGrantRule { + /** + * Permission sets for dashboard are defined in + * iam-common/rbac-common/permission-sets/definitions/TreeStoreBasePermissionSets Ex: + * `permissionSets/dashboard.runner` + */ + @JsonProperty("permission_set") + private String permissionSet; + + public AuthorizationDetailsGrantRule setPermissionSet(String permissionSet) { + this.permissionSet = permissionSet; + return this; + } + + public String getPermissionSet() { + return permissionSet; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AuthorizationDetailsGrantRule that = (AuthorizationDetailsGrantRule) o; + return Objects.equals(permissionSet, that.permissionSet); + } + + @Override + public int hashCode() { + return Objects.hash(permissionSet); + } + + @Override + public String toString() { + return new ToStringer(AuthorizationDetailsGrantRule.class) + .add("permissionSet", permissionSet) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java index 81daf0c1b..93b3e1c93 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java @@ -174,14 +174,52 @@ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryRes /** * Generate full query result download. * - *

Initiate full SQL query result download and obtain a transient ID for tracking the download - * progress. This call initiates a new SQL execution to generate the query result. + *

Initiate full SQL query result download and obtain a `download_id` to track the download + * progress. This call initiates a new SQL execution to generate the query result. The result is + * stored in an external link and can be retrieved using the [Get Download Full Query + * Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks strongly recommends + * that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. See [Execute + * Statement](:method:statementexecution/executestatement) for more details. */ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult( GenieGenerateDownloadFullQueryResultRequest request) { return impl.generateDownloadFullQueryResult(request); } + public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult( + String spaceId, + String conversationId, + String messageId, + String attachmentId, + String downloadId) { + return getDownloadFullQueryResult( + new GenieGetDownloadFullQueryResultRequest() + .setSpaceId(spaceId) + .setConversationId(conversationId) + .setMessageId(messageId) + .setAttachmentId(attachmentId) + .setDownloadId(downloadId)); + } + + /** + * Get download full query result. + *

After [Generating a Full Query Result Download](:method:genie/generatedownloadfullqueryresult) + * and successfully receiving a `download_id`, use this API to poll download progress and retrieve + * the SQL query result external link(s) upon completion. Warning: Databricks strongly recommends + * that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. When you use + * the `EXTERNAL_LINKS` disposition, a short-lived, presigned URL is generated, which can be used + * to download the results directly from Amazon S3. As a short-lived access credential is embedded + * in this presigned URL, you should protect the URL. Because presigned URLs are already generated + * with embedded temporary access credentials, you must not set an Authorization header in the + * download requests. See [Execute Statement](:method:statementexecution/executestatement) for + * more details. + */ + public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult( + GenieGetDownloadFullQueryResultRequest request) { + return impl.getDownloadFullQueryResult(request); + } + public GenieMessage getMessage(String spaceId, String conversationId, String messageId) { return getMessage( new GenieGetConversationMessageRequest() diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java index 3b8991cc1..e51751c8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java @@ -9,46 +9,17 @@ @Generated public class GenieGenerateDownloadFullQueryResultResponse { - /** Error message if Genie failed to download the result */ - @JsonProperty("error") - private String error; + /** Download ID. Use this ID to track the download request in subsequent polling calls */ + @JsonProperty("download_id") + private String downloadId; - /** Download result status */ - @JsonProperty("status") - private MessageStatus status; - - /** - * Transient Statement ID.
Use this ID to track the download request in subsequent polling calls - */ - @JsonProperty("transient_statement_id") - private String transientStatementId; - - public GenieGenerateDownloadFullQueryResultResponse setError(String error) { - this.error = error; - return this; - } - - public String getError() { - return error; - } - - public GenieGenerateDownloadFullQueryResultResponse setStatus(MessageStatus status) { - this.status = status; - return this; - } - - public MessageStatus getStatus() { - return status; - } - - public GenieGenerateDownloadFullQueryResultResponse setTransientStatementId( - String transientStatementId) { - this.transientStatementId = transientStatementId; + public GenieGenerateDownloadFullQueryResultResponse setDownloadId(String downloadId) { + this.downloadId = downloadId; return this; } - public String getTransientStatementId() { - return transientStatementId; + public String getDownloadId() { + return downloadId; } @Override @@ -57,22 +28,18 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; GenieGenerateDownloadFullQueryResultResponse that = (GenieGenerateDownloadFullQueryResultResponse) o; - return Objects.equals(error, that.error) - && Objects.equals(status, that.status) - && Objects.equals(transientStatementId, that.transientStatementId); + return Objects.equals(downloadId, that.downloadId); } @Override public int hashCode() { - return Objects.hash(error, status, transientStatementId); + return Objects.hash(downloadId); } @Override public String toString() { return new ToStringer(GenieGenerateDownloadFullQueryResultResponse.class) - .add("error", error) - .add("status", status) - .add("transientStatementId", transientStatementId) + .add("downloadId", downloadId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java new file mode 100755 index 000000000..9a3024bd7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java @@ -0,0 +1,103 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get download full query result */ +@Generated +public class GenieGetDownloadFullQueryResultRequest { + /** Attachment ID */ + @JsonIgnore private String attachmentId; + + /** Conversation ID */ + @JsonIgnore private String conversationId; + + /** + * Download ID. 
This ID is provided by the [Generate Download + * endpoint](:method:genie/generateDownloadFullQueryResult) + */ + @JsonIgnore private String downloadId; + + /** Message ID */ + @JsonIgnore private String messageId; + + /** Space ID */ + @JsonIgnore private String spaceId; + + public GenieGetDownloadFullQueryResultRequest setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieGetDownloadFullQueryResultRequest setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieGetDownloadFullQueryResultRequest setDownloadId(String downloadId) { + this.downloadId = downloadId; + return this; + } + + public String getDownloadId() { + return downloadId; + } + + public GenieGetDownloadFullQueryResultRequest setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieGetDownloadFullQueryResultRequest setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetDownloadFullQueryResultRequest that = (GenieGetDownloadFullQueryResultRequest) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(downloadId, that.downloadId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(attachmentId, conversationId, downloadId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieGetDownloadFullQueryResultRequest.class) + .add("attachmentId", attachmentId) + .add("conversationId", conversationId) + .add("downloadId", downloadId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java new file mode 100755 index 000000000..490c5c518 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java @@ -0,0 +1,48 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenieGetDownloadFullQueryResultResponse { + /** + * SQL Statement Execution response. See [Get status, manifest, and result first + * chunk](:method:statementexecution/getstatement) for more details. 
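Taken together, the generate and get endpoints form a two-step flow: start the download, then poll with the returned `download_id` until the embedded statement execution settles. A rough sketch against `WorkspaceClient` follows; the IDs, the two-second cadence, and the terminal-state check are assumptions, not something this diff prescribes:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultRequest;
import com.databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultResponse;
import com.databricks.sdk.service.dashboards.GenieGetDownloadFullQueryResultResponse;
import com.databricks.sdk.service.sql.StatementState;

public class GenieDownloadExample {
  public static void main(String[] args) throws InterruptedException {
    WorkspaceClient w = new WorkspaceClient();

    // Step 1: start the download and receive a download_id.
    GenieGenerateDownloadFullQueryResultResponse generated =
        w.genie()
            .generateDownloadFullQueryResult(
                new GenieGenerateDownloadFullQueryResultRequest()
                    .setSpaceId("space-id") // placeholders throughout
                    .setConversationId("conversation-id")
                    .setMessageId("message-id")
                    .setAttachmentId("attachment-id"));

    // Step 2: poll until the embedded statement execution finishes.
    GenieGetDownloadFullQueryResultResponse result;
    do {
      Thread.sleep(2000); // assumed cadence; tune to your workload
      result =
          w.genie()
              .getDownloadFullQueryResult(
                  "space-id", "conversation-id", "message-id", "attachment-id",
                  generated.getDownloadId());
    } while (result.getStatementResponse().getStatus().getState() == StatementState.PENDING
        || result.getStatementResponse().getStatus().getState() == StatementState.RUNNING);
  }
}
```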
+ */ + @JsonProperty("statement_response") + private com.databricks.sdk.service.sql.StatementResponse statementResponse; + + public GenieGetDownloadFullQueryResultResponse setStatementResponse( + com.databricks.sdk.service.sql.StatementResponse statementResponse) { + this.statementResponse = statementResponse; + return this; + } + + public com.databricks.sdk.service.sql.StatementResponse getStatementResponse() { + return statementResponse; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGetDownloadFullQueryResultResponse that = (GenieGetDownloadFullQueryResultResponse) o; + return Objects.equals(statementResponse, that.statementResponse); + } + + @Override + public int hashCode() { + return Objects.hash(statementResponse); + } + + @Override + public String toString() { + return new ToStringer(GenieGetDownloadFullQueryResultResponse.class) + .add("statementResponse", statementResponse) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java index c536eda5e..8396a9cff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java @@ -75,7 +75,7 @@ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryRes GenieGenerateDownloadFullQueryResultRequest request) { String path = String.format( - "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/generate-download", + "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/downloads", request.getSpaceId(), request.getConversationId(), request.getMessageId(), @@ -90,6 +90,27 @@ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryRes } } + @Override + public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult( + GenieGetDownloadFullQueryResultRequest request) { + String path = + String.format( + "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/downloads/%s", + request.getSpaceId(), + request.getConversationId(), + request.getMessageId(), + request.getAttachmentId(), + request.getDownloadId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GenieGetDownloadFullQueryResultResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GenieMessage getMessage(GenieGetConversationMessageRequest request) { String path = diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java index b9f3c75cb..a1063b051 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java @@ -44,12 +44,33 @@ GenieGetMessageQueryResultResponse executeMessageQuery( /** * Generate full query result download. * - *

Initiate full SQL query result download and obtain a transient ID for tracking the download - * progress. This call initiates a new SQL execution to generate the query result. + *

Initiate full SQL query result download and obtain a `download_id` to track the download + * progress. This call initiates a new SQL execution to generate the query result. The result is + * stored in an external link and can be retrieved using the [Get Download Full Query + * Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks strongly recommends + * that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. See [Execute + * Statement](:method:statementexecution/executestatement) for more details. */ GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult( GenieGenerateDownloadFullQueryResultRequest genieGenerateDownloadFullQueryResultRequest); + /** + * Get download full query result. + *

After [Generating a Full Query Result Download](:method:genie/generatedownloadfullqueryresult) + * and successfully receiving a `download_id`, use this API to poll download progress and retrieve + * the SQL query result external link(s) upon completion. Warning: Databricks strongly recommends + * that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. When you use + * the `EXTERNAL_LINKS` disposition, a short-lived, presigned URL is generated, which can be used + * to download the results directly from Amazon S3. As a short-lived access credential is embedded + * in this presigned URL, you should protect the URL. Because presigned URLs are already generated + * with embedded temporary access credentials, you must not set an Authorization header in the + * download requests. See [Execute Statement](:method:statementexecution/executestatement) for + * more details. + */ + GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult( + GenieGetDownloadFullQueryResultRequest genieGetDownloadFullQueryResultRequest); + /** * Get conversation message. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java new file mode 100755 index 000000000..6bba3f0d8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; + +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Read information about a published dashboard to mint an OAuth token. */ +@Generated +public class GetPublishedDashboardTokenInfoRequest { + /** UUID identifying the published dashboard. */ + @JsonIgnore private String dashboardId; + + /** Provided external value to be included in the custom claim. */ + @JsonIgnore + @QueryParam("external_value") + private String externalValue; + + /** Provided external viewer id to be included in the custom claim.
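To make the warning above concrete: the external links already embed short-lived credentials, so the subsequent download must be a bare GET. A minimal sketch using the JDK's `java.net.http` client, assuming `externalLink` was pulled out of the statement response's result data:

```java
import java.io.IOException;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class PresignedDownload {
  // The presigned URL carries its own temporary credentials, so no
  // Authorization header may be attached to this request.
  static byte[] fetch(String externalLink) throws IOException, InterruptedException {
    HttpRequest request = HttpRequest.newBuilder(URI.create(externalLink)).GET().build();
    return HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofByteArray())
        .body();
  }
}
```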
*/ + @JsonIgnore + @QueryParam("external_viewer_id") + private String externalViewerId; + + public GetPublishedDashboardTokenInfoRequest setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public GetPublishedDashboardTokenInfoRequest setExternalValue(String externalValue) { + this.externalValue = externalValue; + return this; + } + + public String getExternalValue() { + return externalValue; + } + + public GetPublishedDashboardTokenInfoRequest setExternalViewerId(String externalViewerId) { + this.externalViewerId = externalViewerId; + return this; + } + + public String getExternalViewerId() { + return externalViewerId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPublishedDashboardTokenInfoRequest that = (GetPublishedDashboardTokenInfoRequest) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(externalValue, that.externalValue) + && Objects.equals(externalViewerId, that.externalViewerId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, externalValue, externalViewerId); + } + + @Override + public String toString() { + return new ToStringer(GetPublishedDashboardTokenInfoRequest.class) + .add("dashboardId", dashboardId) + .add("externalValue", externalValue) + .add("externalViewerId", externalViewerId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java new file mode 100755 index 000000000..e7022d311 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java @@ -0,0 +1,83 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GetPublishedDashboardTokenInfoResponse { + /** + * Authorization constraints for accessing the published dashboard. Currently includes + * `workspace_rule_set` and could be enriched with `unity_catalog_privileges` before OAuth token + * generation. + */ + @JsonProperty("authorization_details") + private Collection authorizationDetails; + + /** + * Custom claim generated from external_value and external_viewer_id. Format: + * `urn:aibi:external_data:::` + */ + @JsonProperty("custom_claim") + private String customClaim; + + /** Scope defining access permissions.
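For the embedding flow these two types support, a hedged end-to-end sketch — the dashboard ID and external viewer values are placeholders, and the accessor names mirror the generated classes above:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.GetPublishedDashboardTokenInfoRequest;
import com.databricks.sdk.service.dashboards.GetPublishedDashboardTokenInfoResponse;

public class TokenInfoExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Tag the request with an external viewer so it lands in the custom claim.
    GetPublishedDashboardTokenInfoResponse tokenInfo =
        w.lakeviewEmbedded()
            .getPublishedDashboardTokenInfo(
                new GetPublishedDashboardTokenInfoRequest()
                    .setDashboardId("00000000-0000-0000-0000-000000000000") // placeholder UUID
                    .setExternalViewerId("viewer-42")
                    .setExternalValue("tenant-a"));
    // scope, custom_claim and authorization_details feed the OAuth token request.
    System.out.println(tokenInfo.getScope());
  }
}
```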
*/ + @JsonProperty("scope") + private String scope; + + public GetPublishedDashboardTokenInfoResponse setAuthorizationDetails( + Collection authorizationDetails) { + this.authorizationDetails = authorizationDetails; + return this; + } + + public Collection getAuthorizationDetails() { + return authorizationDetails; + } + + public GetPublishedDashboardTokenInfoResponse setCustomClaim(String customClaim) { + this.customClaim = customClaim; + return this; + } + + public String getCustomClaim() { + return customClaim; + } + + public GetPublishedDashboardTokenInfoResponse setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPublishedDashboardTokenInfoResponse that = (GetPublishedDashboardTokenInfoResponse) o; + return Objects.equals(authorizationDetails, that.authorizationDetails) + && Objects.equals(customClaim, that.customClaim) + && Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(authorizationDetails, customClaim, scope); + } + + @Override + public String toString() { + return new ToStringer(GetPublishedDashboardTokenInfoResponse.class) + .add("authorizationDetails", authorizationDetails) + .add("customClaim", customClaim) + .add("scope", scope) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java index 3e71a00e8..ec34c7536 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java @@ -37,6 +37,26 @@ public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest r impl.getPublishedDashboardEmbedded(request); } + public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo(String dashboardId) { + return getPublishedDashboardTokenInfo( + new GetPublishedDashboardTokenInfoRequest().setDashboardId(dashboardId)); + } + + /** + * Read information about a published dashboard to mint an OAuth token. + *

Get the required authorization details and scopes of a published dashboard to mint an OAuth + * token. The `authorization_details` can be enriched to apply additional restrictions. + *

Example: Adding the following `authorization_details` object to downscope the viewer + * permission to a specific table ``` { type: "unity_catalog_privileges", privileges: ["SELECT"], + * object_type: "TABLE", object_full_path: "main.default.testdata" } ``` */ + public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo( + GetPublishedDashboardTokenInfoRequest request) { + return impl.getPublishedDashboardTokenInfo(request); + } + public LakeviewEmbeddedService impl() { return impl; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java index 637c8310e..38c982eb1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java @@ -30,4 +30,20 @@ public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest r throw new DatabricksException("IO error: " + e.getMessage(), e); } } + + @Override + public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo( + GetPublishedDashboardTokenInfoRequest request) { + String path = + String.format( + "/api/2.0/lakeview/dashboards/%s/published/tokeninfo", request.getDashboardId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetPublishedDashboardTokenInfoResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java index ab5f9df94..a7fbb8cdb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java @@ -19,4 +19,17 @@ public interface LakeviewEmbeddedService { */ void getPublishedDashboardEmbedded( GetPublishedDashboardEmbeddedRequest getPublishedDashboardEmbeddedRequest); + + /** + * Read information about a published dashboard to mint an OAuth token. + *

Get the required authorization details and scopes of a published dashboard to mint an OAuth + * token. The `authorization_details` can be enriched to apply additional restrictions. + * + *

Example: Adding the following `authorization_details` object to downscope the viewer + * permission to a specific table ``` { type: "unity_catalog_privileges", privileges: ["SELECT"], + * object_type: "TABLE", object_full_path: "main.default.testdata" } ``` */ + GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo( + GetPublishedDashboardTokenInfoRequest getPublishedDashboardTokenInfoRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AuthenticationMethod.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AuthenticationMethod.java new file mode 100755 index 000000000..121d75699 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/AuthenticationMethod.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AuthenticationMethod { + OAUTH, + PAT, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java index 9fc3a5d45..fa9f0ab77 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java @@ -32,8 +32,8 @@ public class BaseJob { private String effectiveBudgetPolicyId; /** - * Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They - * can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 + * Indicates if the job has more array properties (`tasks`, `job_clusters`) that are not shown. + * They can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 * :method:jobs/list requests with `expand_tasks=true`. */ @JsonProperty("has_more") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java index 9dbd706f9..d6864e7ba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java @@ -52,9 +52,13 @@ public class BaseRun { private String description; /** - * effective_performance_target is the actual performance target used by the run during execution. - * effective_performance_target can differ from the client-set performance_target depending on if - * the job was eligible to be cost-optimized. + * The actual performance target used by the serverless run during execution. This can differ from + * the client-set performance target on the request depending on whether the performance mode is + * supported by the job type. + * + *

* `STANDARD`: Enables cost-efficient execution of serverless workloads. * + * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and + * optimized cluster performance. */ @JsonProperty("effective_performance_target") private PerformanceTarget effectivePerformanceTarget; @@ -91,8 +95,8 @@ public class BaseRun { private GitSource gitSource; /** - * Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They - * can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 + * Indicates if the run has more array properties (`tasks`, `job_clusters`) that are not shown. + * They can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 * :method:jobs/listruns requests with `expand_tasks=true`. */ @JsonProperty("has_more") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java index c183e1b1a..af181cd7f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ComputeConfig.java @@ -7,7 +7,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Next field: 4 */ @Generated public class ComputeConfig { /** ID of the GPU pool to use. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java index 8fa81b9d6..a01c273e7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java @@ -94,8 +94,7 @@ public class CreateJob { /** * A list of job cluster specifications that can be shared and reused by tasks of this job. * Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in - * task settings. If more than 100 job clusters are available, you can paginate through them using - * :method:jobs/get. + * task settings. */ @JsonProperty("job_clusters") private Collection jobClusters; @@ -129,8 +128,12 @@ private Collection parameters; /** - * PerformanceTarget defines how performant or cost efficient the execution of run on serverless - * should be. + * The performance mode on a serverless job. The performance target determines the level of + * compute performance or cost-efficiency for the run. + * + *

* `STANDARD`: Enables cost-efficient execution of serverless workloads. * + * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and + * optimized cluster performance. */ @JsonProperty("performance_target") private PerformanceTarget performanceTarget; @@ -165,9 +168,11 @@ public class CreateJob { private Map tags; /** - * A list of task specifications to be executed by this job. If more than 100 tasks are available, - * you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the - * object root to determine if more results are available. + * A list of task specifications to be executed by this job. It supports up to 1000 elements in + * write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, + * :method:jobs/submit). Read endpoints return only 100 tasks. If more than 100 tasks are + * available, you can paginate through them using :method:jobs/get. Use the `next_page_token` + * field at the object root to determine if more results are available. */ @JsonProperty("tasks") private Collection tasks; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshot.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshot.java new file mode 100755 index 000000000..aba88b35f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshot.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class DashboardPageSnapshot { + /** */ + @JsonProperty("page_display_name") + private String pageDisplayName; + + /** */ + @JsonProperty("widget_error_details") + private Collection widgetErrorDetails; + + public DashboardPageSnapshot setPageDisplayName(String pageDisplayName) { + this.pageDisplayName = pageDisplayName; + return this; + } + + public String getPageDisplayName() { + return pageDisplayName; + } + + public DashboardPageSnapshot setWidgetErrorDetails( + Collection widgetErrorDetails) { + this.widgetErrorDetails = widgetErrorDetails; + return this; + } + + public Collection getWidgetErrorDetails() { + return widgetErrorDetails; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DashboardPageSnapshot that = (DashboardPageSnapshot) o; + return Objects.equals(pageDisplayName, that.pageDisplayName) + && Objects.equals(widgetErrorDetails, that.widgetErrorDetails); + } + + @Override + public int hashCode() { + return Objects.hash(pageDisplayName, widgetErrorDetails); + } + + @Override + public String toString() { + return new ToStringer(DashboardPageSnapshot.class) + .add("pageDisplayName", pageDisplayName) + .add("widgetErrorDetails", widgetErrorDetails) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java new file mode 100755 index 000000000..897c1c11a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK 
Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Configures the Lakeview Dashboard job task type. */ +@Generated +public class DashboardTask { + /** */ + @JsonProperty("dashboard_id") + private String dashboardId; + + /** */ + @JsonProperty("subscription") + private Subscription subscription; + + /** The ID of the warehouse used to execute the dashboard for the schedule. */ + @JsonProperty("warehouse_id") + private String warehouseId; + + public DashboardTask setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public DashboardTask setSubscription(Subscription subscription) { + this.subscription = subscription; + return this; + } + + public Subscription getSubscription() { + return subscription; + } + + public DashboardTask setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DashboardTask that = (DashboardTask) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(subscription, that.subscription) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, subscription, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(DashboardTask.class) + .add("dashboardId", dashboardId) + .add("subscription", subscription) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java new file mode 100755 index 000000000..51ccfd9ea --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class DashboardTaskOutput { + /** Should only be populated for manual PDF download jobs.
*/ + @JsonProperty("page_snapshots") + private Collection pageSnapshots; + + public DashboardTaskOutput setPageSnapshots(Collection pageSnapshots) { + this.pageSnapshots = pageSnapshots; + return this; + } + + public Collection getPageSnapshots() { + return pageSnapshots; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DashboardTaskOutput that = (DashboardTaskOutput) o; + return Objects.equals(pageSnapshots, that.pageSnapshots); + } + + @Override + public int hashCode() { + return Objects.hash(pageSnapshots); + } + + @Override + public String toString() { + return new ToStringer(DashboardTaskOutput.class).add("pageSnapshots", pageSnapshots).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java index 68e0257df..28df26085 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GenAiComputeTask.java @@ -7,14 +7,13 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Next field: 9 */ @Generated public class GenAiComputeTask { /** Command launcher to run the actual script, e.g. bash, python etc. */ @JsonProperty("command") private String command; - /** Next field: 4 */ + /** */ @JsonProperty("compute") private ComputeConfig compute; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java index 74fc2f572..9a1064c89 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java @@ -17,8 +17,8 @@ public class GetJobRequest { private Long jobId; /** - * Use `next_page_token` returned from the previous GetJob to request the next page of the job's - * sub-resources. + * Use `next_page_token` returned from the previous GetJob response to request the next page of + * the job's array properties. */ @JsonIgnore @QueryParam("page_token") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java index 9b2c2e9e5..afa115088 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java @@ -22,8 +22,8 @@ public class GetRunRequest { private Boolean includeResolvedValues; /** - * Use `next_page_token` returned from the previous GetRun to request the next page of the run's - * sub-resources. + * Use `next_page_token` returned from the previous GetRun response to request the next page of + * the run's array properties. 
*/ @JsonIgnore @QueryParam("page_token") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java index f996f662b..062121875 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java @@ -33,8 +33,8 @@ public class Job { private String effectiveBudgetPolicyId; /** - * Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They - * can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 + * Indicates if the job has more array properties (`tasks`, `job_clusters`) that are not shown. + * They can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 * :method:jobs/list requests with `expand_tasks=true`. */ @JsonProperty("has_more") @@ -44,7 +44,7 @@ public class Job { @JsonProperty("job_id") private Long jobId; - /** A token that can be used to list the next page of sub-resources. */ + /** A token that can be used to list the next page of array properties. */ @JsonProperty("next_page_token") private String nextPageToken; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java index eb3e89525..668e15467 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java @@ -90,8 +90,7 @@ public class JobSettings { /** * A list of job cluster specifications that can be shared and reused by tasks of this job. * Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in - * task settings. If more than 100 job clusters are available, you can paginate through them using - * :method:jobs/get. + * task settings. */ @JsonProperty("job_clusters") private Collection jobClusters; @@ -125,8 +124,12 @@ public class JobSettings { private Collection parameters; /** - * PerformanceTarget defines how performant or cost efficient the execution of run on serverless - * should be. + * The performance mode on a serverless job. The performance target determines the level of + * compute performance or cost-efficiency for the run. + * + *

* `STANDARD`: Enables cost-efficient execution of serverless workloads. * + * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and + * optimized cluster performance. */ @JsonProperty("performance_target") private PerformanceTarget performanceTarget; @@ -161,9 +164,11 @@ public class JobSettings { private Map tags; /** - * A list of task specifications to be executed by this job. If more than 100 tasks are available, - * you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the - * object root to determine if more results are available. + * A list of task specifications to be executed by this job. It supports up to 1000 elements in + * write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, + * :method:jobs/submit). Read endpoints return only 100 tasks. If more than 100 tasks are + * available, you can paginate through them using :method:jobs/get. Use the `next_page_token` + * field at the object root to determine if more results are available. */ @JsonProperty("tasks") private Collection tasks; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java index 5542bb665..3acf1dace 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java @@ -183,10 +183,13 @@ public Job get(long jobId) { * *

Retrieves the details for a single job. * - *

In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` - * when either exceeds 100 elements. Use the `next_page_token` field to check for more results and - * pass its value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements - * in a page will be empty on later pages. + *

Large arrays in the results will be paginated when they exceed 100 elements. A request for a + * single job will return all properties for that job, and the first 100 elements of array + * properties (`tasks`, `job_clusters`, `environments` and `parameters`). Use the + * `next_page_token` field to check for more results and pass its value as the `page_token` in + * subsequent requests. If any array properties have more than 100 elements, additional results + * will be returned on subsequent requests. Arrays without additional results will be empty on + * later pages. */ public Job get(GetJobRequest request) { return impl.get(request); @@ -227,10 +230,13 @@ public Run getRun(long runId) { * *

Retrieves the metadata of a run. * - *

In Jobs API 2.2, requests for a single job run support pagination of `tasks` and - * `job_clusters` when either exceeds 100 elements. Use the `next_page_token` field to check for - * more results and pass its value as the `page_token` in subsequent requests. Arrays with fewer - * than 100 elements in a page will be empty on later pages. + *

Large arrays in the results will be paginated when they exceed 100 elements. A request for a + * single run will return all properties for that run, and the first 100 elements of array + * properties (`tasks`, `job_clusters`, `job_parameters` and `repair_history`). Use the + * `next_page_token` field to check for more results and pass its value as the `page_token` in + * subsequent requests. If any array properties have more than 100 elements, additional results + * will be returned on subsequent requests. Arrays without additional results will be empty on + * later pages. */ public Run getRun(GetRunRequest request) { return impl.getRun(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java index 2b8b9ee9c..d99c7f062 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java @@ -77,10 +77,13 @@ public interface JobsService { * *

Retrieves the details for a single job. * - *

In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` - * when either exceeds 100 elements. Use the `next_page_token` field to check for more results and - * pass its value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements - * in a page will be empty on later pages. + *

Large arrays in the results will be paginated when they exceed 100 elements. A request for a + * single job will return all properties for that job, and the first 100 elements of array + * properties (`tasks`, `job_clusters`, `environments` and `parameters`). Use the + * `next_page_token` field to check for more results and pass its value as the `page_token` in + * subsequent requests. If any array properties have more than 100 elements, additional results + * will be returned on subsequent requests. Arrays without additional results will be empty on + * later pages. */ Job get(GetJobRequest getJobRequest); @@ -104,10 +107,13 @@ GetJobPermissionLevelsResponse getPermissionLevels( * *

Retrieves the metadata of a run. * - *

In Jobs API 2.2, requests for a single job run support pagination of `tasks` and - * `job_clusters` when either exceeds 100 elements. Use the `next_page_token` field to check for - * more results and pass its value as the `page_token` in subsequent requests. Arrays with fewer - * than 100 elements in a page will be empty on later pages. + *

Large arrays in the results will be paginated when they exceed 100 elements. A request for a + * single run will return all properties for that run, and the first 100 elements of array + * properties (`tasks`, `job_clusters`, `job_parameters` and `repair_history`). Use the + * `next_page_token` field to check for more results and pass its value as the `page_token` in + * subsequent requests. If any array properties have more than 100 elements, additional results + * will be returned on subsequent requests. Arrays without additional results will be empty on + * later pages. */ Run getRun(GetRunRequest getRunRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java index b49e9330e..cae63d9e8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java @@ -12,9 +12,8 @@ @Generated public class ListJobsRequest { /** - * Whether to include task and cluster details in the response. Note that in API 2.2, only the - * first 100 elements will be shown. Use :method:jobs/get to paginate through all tasks and - * clusters. + * Whether to include task and cluster details in the response. Note that only the first 100 + * elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters. */ @JsonIgnore @QueryParam("expand_tasks") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java index 19b36509b..2f28cf7ef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java @@ -29,9 +29,8 @@ public class ListRunsRequest { private Boolean completedOnly; /** - * Whether to include task and cluster details in the response. Note that in API 2.2, only the - * first 100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and - * clusters. + * Whether to include task and cluster details in the response. Note that only the first 100 + * elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters. */ @JsonIgnore @QueryParam("expand_tasks") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PerformanceTarget.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PerformanceTarget.java index ddf83eae9..77dd36664 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PerformanceTarget.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PerformanceTarget.java @@ -11,7 +11,6 @@ */ @Generated public enum PerformanceTarget { - BALANCED, - COST_OPTIMIZED, PERFORMANCE_OPTIMIZED, + STANDARD, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiModel.java new file mode 100755 index 000000000..db64f341b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiModel.java @@ -0,0 +1,105 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
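Given the `next_page_token`/`page_token` contract described above, draining a job's array properties is a short loop. A sketch, assuming an authenticated `WorkspaceClient` named `w` and a placeholder job ID; the fluent setters follow the pattern used throughout these request classes:

```
// Sketch: page through all tasks of a job via :method:jobs/get.
long jobId = 123L; // placeholder
List<Task> allTasks = new ArrayList<>();
String pageToken = null;
do {
  Job job = w.jobs().get(new GetJobRequest().setJobId(jobId).setPageToken(pageToken));
  if (job.getSettings() != null && job.getSettings().getTasks() != null) {
    allTasks.addAll(job.getSettings().getTasks());
  }
  pageToken = job.getNextPageToken();
} while (pageToken != null);
```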
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class PowerBiModel { + /** How the published Power BI model authenticates to Databricks */ + @JsonProperty("authentication_method") + private AuthenticationMethod authenticationMethod; + + /** The name of the Power BI model */ + @JsonProperty("model_name") + private String modelName; + + /** Whether to overwrite existing Power BI models */ + @JsonProperty("overwrite_existing") + private Boolean overwriteExisting; + + /** The default storage mode of the Power BI model */ + @JsonProperty("storage_mode") + private StorageMode storageMode; + + /** The name of the Power BI workspace of the model */ + @JsonProperty("workspace_name") + private String workspaceName; + + public PowerBiModel setAuthenticationMethod(AuthenticationMethod authenticationMethod) { + this.authenticationMethod = authenticationMethod; + return this; + } + + public AuthenticationMethod getAuthenticationMethod() { + return authenticationMethod; + } + + public PowerBiModel setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public PowerBiModel setOverwriteExisting(Boolean overwriteExisting) { + this.overwriteExisting = overwriteExisting; + return this; + } + + public Boolean getOverwriteExisting() { + return overwriteExisting; + } + + public PowerBiModel setStorageMode(StorageMode storageMode) { + this.storageMode = storageMode; + return this; + } + + public StorageMode getStorageMode() { + return storageMode; + } + + public PowerBiModel setWorkspaceName(String workspaceName) { + this.workspaceName = workspaceName; + return this; + } + + public String getWorkspaceName() { + return workspaceName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PowerBiModel that = (PowerBiModel) o; + return Objects.equals(authenticationMethod, that.authenticationMethod) + && Objects.equals(modelName, that.modelName) + && Objects.equals(overwriteExisting, that.overwriteExisting) + && Objects.equals(storageMode, that.storageMode) + && Objects.equals(workspaceName, that.workspaceName); + } + + @Override + public int hashCode() { + return Objects.hash( + authenticationMethod, modelName, overwriteExisting, storageMode, workspaceName); + } + + @Override + public String toString() { + return new ToStringer(PowerBiModel.class) + .add("authenticationMethod", authenticationMethod) + .add("modelName", modelName) + .add("overwriteExisting", overwriteExisting) + .add("storageMode", storageMode) + .add("workspaceName", workspaceName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTable.java new file mode 100755 index 000000000..1de4cb25d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTable.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class PowerBiTable { + /** The catalog name in Databricks */ + @JsonProperty("catalog") + private String catalog; + + /** The table name in Databricks */ + @JsonProperty("name") + private String name; + + /** The schema name in Databricks */ + @JsonProperty("schema") + private String schema; + + /** The Power BI storage mode of the table */ + @JsonProperty("storage_mode") + private StorageMode storageMode; + + public PowerBiTable setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public PowerBiTable setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public PowerBiTable setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + public PowerBiTable setStorageMode(StorageMode storageMode) { + this.storageMode = storageMode; + return this; + } + + public StorageMode getStorageMode() { + return storageMode; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PowerBiTable that = (PowerBiTable) o; + return Objects.equals(catalog, that.catalog) + && Objects.equals(name, that.name) + && Objects.equals(schema, that.schema) + && Objects.equals(storageMode, that.storageMode); + } + + @Override + public int hashCode() { + return Objects.hash(catalog, name, schema, storageMode); + } + + @Override + public String toString() { + return new ToStringer(PowerBiTable.class) + .add("catalog", catalog) + .add("name", name) + .add("schema", schema) + .add("storageMode", storageMode) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTask.java new file mode 100755 index 000000000..53e0de86d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PowerBiTask.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class PowerBiTask { + /** The resource name of the UC connection to authenticate from Databricks to Power BI */ + @JsonProperty("connection_resource_name") + private String connectionResourceName; + + /** The semantic model to update */ + @JsonProperty("power_bi_model") + private PowerBiModel powerBiModel; + + /** Whether the model should be refreshed after the update */ + @JsonProperty("refresh_after_update") + private Boolean refreshAfterUpdate; + + /** The tables to be exported to Power BI */ + @JsonProperty("tables") + private Collection tables; + + /** The SQL warehouse ID to use as the Power BI data source */ + @JsonProperty("warehouse_id") + private String warehouseId; + + public PowerBiTask setConnectionResourceName(String connectionResourceName) { + this.connectionResourceName = connectionResourceName; + return this; + } + + public String getConnectionResourceName() { + return connectionResourceName; + } + + public PowerBiTask setPowerBiModel(PowerBiModel powerBiModel) { + this.powerBiModel = powerBiModel; + return this; + } + + public PowerBiModel getPowerBiModel() { + return powerBiModel; + } + + public PowerBiTask setRefreshAfterUpdate(Boolean refreshAfterUpdate) { + this.refreshAfterUpdate = refreshAfterUpdate; + return this; + } + + public Boolean getRefreshAfterUpdate() { + return refreshAfterUpdate; + } + + public PowerBiTask setTables(Collection tables) { + this.tables = tables; + return this; + } + + public Collection getTables() { + return tables; + } + + public PowerBiTask setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PowerBiTask that = (PowerBiTask) o; + return Objects.equals(connectionResourceName, that.connectionResourceName) + && Objects.equals(powerBiModel, that.powerBiModel) + && Objects.equals(refreshAfterUpdate, that.refreshAfterUpdate) + && Objects.equals(tables, that.tables) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + connectionResourceName, powerBiModel, refreshAfterUpdate, tables, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(PowerBiTask.class) + .add("connectionResourceName", connectionResourceName) + .add("powerBiModel", powerBiModel) + .add("refreshAfterUpdate", refreshAfterUpdate) + .add("tables", tables) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java index be8386cf9..2e090dcd9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java @@ -53,9 +53,13 @@ public class Run { private String description; /** - * effective_performance_target is the actual performance target used by the run during execution. - * effective_performance_target can differ from the client-set performance_target depending on if - * the job was eligible to be cost-optimized. 
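Taken together, the new Power BI types compose roughly as follows. A sketch in which every name and ID is illustrative; the resulting task would be attached to a job task via the `setPowerBiTask` setter added later in this diff:

```
// Sketch: export one Unity Catalog table to a Power BI semantic model and
// refresh the model after the update (all identifiers are placeholders).
PowerBiTask powerBiTask =
    new PowerBiTask()
        .setConnectionResourceName("my-uc-connection")
        .setWarehouseId("my-warehouse-id")
        .setRefreshAfterUpdate(true)
        .setPowerBiModel(
            new PowerBiModel()
                .setWorkspaceName("Sales")
                .setModelName("sales_model")
                .setAuthenticationMethod(AuthenticationMethod.OAUTH)
                .setStorageMode(StorageMode.DIRECT_QUERY)
                .setOverwriteExisting(true))
        .setTables(
            Arrays.asList(
                new PowerBiTable()
                    .setCatalog("main")
                    .setSchema("default")
                    .setName("sales")
                    .setStorageMode(StorageMode.DIRECT_QUERY)));
```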
+ * The actual performance target used by the serverless run during execution. This can differ from + * the client-set performance target on the request depending on whether the performance mode is + * supported by the job type. + * + *

* `STANDARD`: Enables cost-efficient execution of serverless workloads. * + * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and + * optimized cluster performance. */ @JsonProperty("effective_performance_target") private PerformanceTarget effectivePerformanceTarget; @@ -92,8 +96,8 @@ public class Run { private GitSource gitSource; /** - * Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They - * can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 + * Indicates if the run has more array properties (`tasks`, `job_clusters`) that are not shown. + * They can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 * :method:jobs/listruns requests with `expand_tasks=true`. */ @JsonProperty("has_more") @@ -128,7 +132,7 @@ public class Run { @JsonProperty("job_run_id") private Long jobRunId; - /** A token that can be used to list the next page of sub-resources. */ + /** A token that can be used to list the next page of array properties. */ @JsonProperty("next_page_token") private String nextPageToken; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java index 1ca7bf0b9..4b2a28f7e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java @@ -85,9 +85,13 @@ public class RunNow { private Collection only; /** - * PerformanceTarget defines how performant or cost efficient the execution of run on serverless - * compute should be. For RunNow, this performance target will override the target defined on the - * job-level. + * The performance mode on a serverless job. The performance target determines the level of + * compute performance or cost-efficiency for the run. This field overrides the performance target + * defined on the job level. + * + *

* `STANDARD`: Enables cost-efficient execution of serverless workloads. * + * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and + * optimized cluster performance. */ @JsonProperty("performance_target") private PerformanceTarget performanceTarget; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java index 711e82a8a..b635850c7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java @@ -14,6 +14,10 @@ public class RunOutput { @JsonProperty("clean_rooms_notebook_output") private CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput cleanRoomsNotebookOutput; + /** The output of a dashboard task, if available */ + @JsonProperty("dashboard_output") + private DashboardTaskOutput dashboardOutput; + /** The output of a dbt task, if available. */ @JsonProperty("dbt_output") private DbtOutput dbtOutput; @@ -83,6 +87,15 @@ public CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput getCleanRoomsNotebookO return cleanRoomsNotebookOutput; } + public RunOutput setDashboardOutput(DashboardTaskOutput dashboardOutput) { + this.dashboardOutput = dashboardOutput; + return this; + } + + public DashboardTaskOutput getDashboardOutput() { + return dashboardOutput; + } + public RunOutput setDbtOutput(DbtOutput dbtOutput) { this.dbtOutput = dbtOutput; return this; @@ -179,6 +192,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; RunOutput that = (RunOutput) o; return Objects.equals(cleanRoomsNotebookOutput, that.cleanRoomsNotebookOutput) + && Objects.equals(dashboardOutput, that.dashboardOutput) && Objects.equals(dbtOutput, that.dbtOutput) && Objects.equals(error, that.error) && Objects.equals(errorTrace, that.errorTrace) @@ -195,6 +209,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( cleanRoomsNotebookOutput, + dashboardOutput, dbtOutput, error, errorTrace, @@ -211,6 +226,7 @@ public int hashCode() { public String toString() { return new ToStringer(RunOutput.class) .add("cleanRoomsNotebookOutput", cleanRoomsNotebookOutput) + .add("dashboardOutput", dashboardOutput) .add("dbtOutput", dbtOutput) .add("error", error) .add("errorTrace", errorTrace) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java index c1e6d25ed..773c0e307 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java @@ -53,6 +53,10 @@ public class RunTask { @JsonProperty("condition_task") private RunConditionTask conditionTask; + /** The task runs a DashboardTask when the `dashboard_task` field is present. */ + @JsonProperty("dashboard_task") + private DashboardTask dashboardTask; + /** * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. @@ -72,17 +76,18 @@ public class RunTask { @JsonProperty("description") private String description; - /** - * Denotes whether or not the task was disabled by the user. Disabled tasks do not execute and are - * immediately skipped as soon as they are unblocked. 
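Because this run-level `performance_target` overrides the job-level setting, a one-off latency-sensitive run looks roughly like this (a sketch; `w` and `jobId` are the same placeholders as in the earlier examples):

```
// Sketch: override the job-level performance target for a single run.
w.jobs()
    .runNow(
        new RunNow()
            .setJobId(jobId)
            .setPerformanceTarget(PerformanceTarget.PERFORMANCE_OPTIMIZED));
```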
- */ + /** Deprecated, field was never used in production. */ @JsonProperty("disabled") private Boolean disabled; /** - * effective_performance_target is the actual performance target used by the run during execution. - * effective_performance_target can differ from the client-set performance_target depending on if - * the job was eligible to be cost-optimized. + * The actual performance target used by the serverless run during execution. This can differ from + * the client-set performance target on the request depending on whether the performance mode is + * supported by the job type. + * + *

* `STANDARD`: Enables cost-efficient execution of serverless workloads. * + * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and + * optimized cluster performance. */ @JsonProperty("effective_performance_target") private PerformanceTarget effectivePerformanceTarget; @@ -133,7 +138,7 @@ public class RunTask { @JsonProperty("for_each_task") private RunForEachTask forEachTask; - /** Next field: 9 */ + /** */ @JsonProperty("gen_ai_compute_task") private GenAiComputeTask genAiComputeTask; @@ -184,6 +189,12 @@ public class RunTask { @JsonProperty("pipeline_task") private PipelineTask pipelineTask; + /** + * The task triggers a Power BI semantic model update when the `power_bi_task` field is present. + */ + @JsonProperty("power_bi_task") + private PowerBiTask powerBiTask; + /** The task runs a Python wheel when the `python_wheel_task` field is present. */ @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; @@ -346,6 +357,15 @@ public RunConditionTask getConditionTask() { return conditionTask; } + public RunTask setDashboardTask(DashboardTask dashboardTask) { + this.dashboardTask = dashboardTask; + return this; + } + + public DashboardTask getDashboardTask() { + return dashboardTask; + } + public RunTask setDbtTask(DbtTask dbtTask) { this.dbtTask = dbtTask; return this; @@ -517,6 +537,15 @@ public PipelineTask getPipelineTask() { return pipelineTask; } + public RunTask setPowerBiTask(PowerBiTask powerBiTask) { + this.powerBiTask = powerBiTask; + return this; + } + + public PowerBiTask getPowerBiTask() { + return powerBiTask; + } + public RunTask setPythonWheelTask(PythonWheelTask pythonWheelTask) { this.pythonWheelTask = pythonWheelTask; return this; @@ -698,6 +727,7 @@ public boolean equals(Object o) { && Objects.equals(cleanupDuration, that.cleanupDuration) && Objects.equals(clusterInstance, that.clusterInstance) && Objects.equals(conditionTask, that.conditionTask) + && Objects.equals(dashboardTask, that.dashboardTask) && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) @@ -717,6 +747,7 @@ public boolean equals(Object o) { && Objects.equals(notebookTask, that.notebookTask) && Objects.equals(notificationSettings, that.notificationSettings) && Objects.equals(pipelineTask, that.pipelineTask) + && Objects.equals(powerBiTask, that.powerBiTask) && Objects.equals(pythonWheelTask, that.pythonWheelTask) && Objects.equals(queueDuration, that.queueDuration) && Objects.equals(resolvedValues, that.resolvedValues) @@ -746,6 +777,7 @@ public int hashCode() { cleanupDuration, clusterInstance, conditionTask, + dashboardTask, dbtTask, dependsOn, description, @@ -765,6 +797,7 @@ public int hashCode() { notebookTask, notificationSettings, pipelineTask, + powerBiTask, pythonWheelTask, queueDuration, resolvedValues, @@ -794,6 +827,7 @@ public String toString() { .add("cleanupDuration", cleanupDuration) .add("clusterInstance", clusterInstance) .add("conditionTask", conditionTask) + .add("dashboardTask", dashboardTask) .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) @@ -813,6 +847,7 @@ public String toString() { .add("notebookTask", notebookTask) .add("notificationSettings", notificationSettings) .add("pipelineTask", pipelineTask) + .add("powerBiTask", powerBiTask) .add("pythonWheelTask", pythonWheelTask) .add("queueDuration", queueDuration) .add("resolvedValues", resolvedValues) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/StorageMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/StorageMode.java new file mode 100755 index 000000000..7bd4ae671 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/StorageMode.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum StorageMode { + DIRECT_QUERY, + DUAL, + IMPORT, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 89f802477..09334e892 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -26,6 +26,10 @@ public class SubmitTask { @JsonProperty("condition_task") private ConditionTask conditionTask; + /** The task runs a DashboardTask when the `dashboard_task` field is present. */ + @JsonProperty("dashboard_task") + private DashboardTask dashboardTask; + /** * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. @@ -74,7 +78,7 @@ public class SubmitTask { @JsonProperty("for_each_task") private ForEachTask forEachTask; - /** Next field: 9 */ + /** */ @JsonProperty("gen_ai_compute_task") private GenAiComputeTask genAiComputeTask; @@ -111,6 +115,12 @@ public class SubmitTask { @JsonProperty("pipeline_task") private PipelineTask pipelineTask; + /** + * The task triggers a Power BI semantic model update when the `power_bi_task` field is present. + */ + @JsonProperty("power_bi_task") + private PowerBiTask powerBiTask; + /** The task runs a Python wheel when the `python_wheel_task` field is present. 
*/ @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; @@ -200,6 +210,15 @@ public ConditionTask getConditionTask() { return conditionTask; } + public SubmitTask setDashboardTask(DashboardTask dashboardTask) { + this.dashboardTask = dashboardTask; + return this; + } + + public DashboardTask getDashboardTask() { + return dashboardTask; + } + public SubmitTask setDbtTask(DbtTask dbtTask) { this.dbtTask = dbtTask; return this; @@ -326,6 +345,15 @@ public PipelineTask getPipelineTask() { return pipelineTask; } + public SubmitTask setPowerBiTask(PowerBiTask powerBiTask) { + this.powerBiTask = powerBiTask; + return this; + } + + public PowerBiTask getPowerBiTask() { + return powerBiTask; + } + public SubmitTask setPythonWheelTask(PythonWheelTask pythonWheelTask) { this.pythonWheelTask = pythonWheelTask; return this; @@ -423,6 +451,7 @@ public boolean equals(Object o) { SubmitTask that = (SubmitTask) o; return Objects.equals(cleanRoomsNotebookTask, that.cleanRoomsNotebookTask) && Objects.equals(conditionTask, that.conditionTask) + && Objects.equals(dashboardTask, that.dashboardTask) && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) @@ -437,6 +466,7 @@ public boolean equals(Object o) { && Objects.equals(notebookTask, that.notebookTask) && Objects.equals(notificationSettings, that.notificationSettings) && Objects.equals(pipelineTask, that.pipelineTask) + && Objects.equals(powerBiTask, that.powerBiTask) && Objects.equals(pythonWheelTask, that.pythonWheelTask) && Objects.equals(runIf, that.runIf) && Objects.equals(runJobTask, that.runJobTask) @@ -454,6 +484,7 @@ public int hashCode() { return Objects.hash( cleanRoomsNotebookTask, conditionTask, + dashboardTask, dbtTask, dependsOn, description, @@ -468,6 +499,7 @@ public int hashCode() { notebookTask, notificationSettings, pipelineTask, + powerBiTask, pythonWheelTask, runIf, runJobTask, @@ -485,6 +517,7 @@ public String toString() { return new ToStringer(SubmitTask.class) .add("cleanRoomsNotebookTask", cleanRoomsNotebookTask) .add("conditionTask", conditionTask) + .add("dashboardTask", dashboardTask) .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) @@ -499,6 +532,7 @@ public String toString() { .add("notebookTask", notebookTask) .add("notificationSettings", notificationSettings) .add("pipelineTask", pipelineTask) + .add("powerBiTask", powerBiTask) .add("pythonWheelTask", pythonWheelTask) .add("runIf", runIf) .add("runJobTask", runJobTask) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Subscription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Subscription.java new file mode 100755 index 000000000..40dce899a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Subscription.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class Subscription { + /** Optional: Allows users to specify a custom subject line on the email sent to subscribers. */ + @JsonProperty("custom_subject") + private String customSubject; + + /** When true, the subscription will not send emails. 
*/ + @JsonProperty("paused") + private Boolean paused; + + /** */ + @JsonProperty("subscribers") + private Collection subscribers; + + public Subscription setCustomSubject(String customSubject) { + this.customSubject = customSubject; + return this; + } + + public String getCustomSubject() { + return customSubject; + } + + public Subscription setPaused(Boolean paused) { + this.paused = paused; + return this; + } + + public Boolean getPaused() { + return paused; + } + + public Subscription setSubscribers(Collection subscribers) { + this.subscribers = subscribers; + return this; + } + + public Collection getSubscribers() { + return subscribers; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Subscription that = (Subscription) o; + return Objects.equals(customSubject, that.customSubject) + && Objects.equals(paused, that.paused) + && Objects.equals(subscribers, that.subscribers); + } + + @Override + public int hashCode() { + return Objects.hash(customSubject, paused, subscribers); + } + + @Override + public String toString() { + return new ToStringer(Subscription.class) + .add("customSubject", customSubject) + .add("paused", paused) + .add("subscribers", subscribers) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionSubscriber.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionSubscriber.java new file mode 100755 index 000000000..324800689 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubscriptionSubscriber.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class SubscriptionSubscriber { + /** */ + @JsonProperty("destination_id") + private String destinationId; + + /** */ + @JsonProperty("user_name") + private String userName; + + public SubscriptionSubscriber setDestinationId(String destinationId) { + this.destinationId = destinationId; + return this; + } + + public String getDestinationId() { + return destinationId; + } + + public SubscriptionSubscriber setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SubscriptionSubscriber that = (SubscriptionSubscriber) o; + return Objects.equals(destinationId, that.destinationId) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(destinationId, userName); + } + + @Override + public String toString() { + return new ToStringer(SubscriptionSubscriber.class) + .add("destinationId", destinationId) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index 27f43b9f3..cb619e7f2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -26,6 +26,10 @@ public class Task { 
@JsonProperty("condition_task") private ConditionTask conditionTask; + /** The task runs a DashboardTask when the `dashboard_task` field is present. */ + @JsonProperty("dashboard_task") + private DashboardTask dashboardTask; + /** * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. @@ -79,7 +83,7 @@ public class Task { @JsonProperty("for_each_task") private ForEachTask forEachTask; - /** Next field: 9 */ + /** */ @JsonProperty("gen_ai_compute_task") private GenAiComputeTask genAiComputeTask; @@ -139,6 +143,12 @@ public class Task { @JsonProperty("pipeline_task") private PipelineTask pipelineTask; + /** + * The task triggers a Power BI semantic model update when the `power_bi_task` field is present. + */ + @JsonProperty("power_bi_task") + private PowerBiTask powerBiTask; + /** The task runs a Python wheel when the `python_wheel_task` field is present. */ @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; @@ -239,6 +249,15 @@ public ConditionTask getConditionTask() { return conditionTask; } + public Task setDashboardTask(DashboardTask dashboardTask) { + this.dashboardTask = dashboardTask; + return this; + } + + public DashboardTask getDashboardTask() { + return dashboardTask; + } + public Task setDbtTask(DbtTask dbtTask) { this.dbtTask = dbtTask; return this; @@ -401,6 +420,15 @@ public PipelineTask getPipelineTask() { return pipelineTask; } + public Task setPowerBiTask(PowerBiTask powerBiTask) { + this.powerBiTask = powerBiTask; + return this; + } + + public PowerBiTask getPowerBiTask() { + return powerBiTask; + } + public Task setPythonWheelTask(PythonWheelTask pythonWheelTask) { this.pythonWheelTask = pythonWheelTask; return this; @@ -507,6 +535,7 @@ public boolean equals(Object o) { Task that = (Task) o; return Objects.equals(cleanRoomsNotebookTask, that.cleanRoomsNotebookTask) && Objects.equals(conditionTask, that.conditionTask) + && Objects.equals(dashboardTask, that.dashboardTask) && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) @@ -525,6 +554,7 @@ public boolean equals(Object o) { && Objects.equals(notebookTask, that.notebookTask) && Objects.equals(notificationSettings, that.notificationSettings) && Objects.equals(pipelineTask, that.pipelineTask) + && Objects.equals(powerBiTask, that.powerBiTask) && Objects.equals(pythonWheelTask, that.pythonWheelTask) && Objects.equals(retryOnTimeout, that.retryOnTimeout) && Objects.equals(runIf, that.runIf) @@ -543,6 +573,7 @@ public int hashCode() { return Objects.hash( cleanRoomsNotebookTask, conditionTask, + dashboardTask, dbtTask, dependsOn, description, @@ -561,6 +592,7 @@ public int hashCode() { notebookTask, notificationSettings, pipelineTask, + powerBiTask, pythonWheelTask, retryOnTimeout, runIf, @@ -579,6 +611,7 @@ public String toString() { return new ToStringer(Task.class) .add("cleanRoomsNotebookTask", cleanRoomsNotebookTask) .add("conditionTask", conditionTask) + .add("dashboardTask", dashboardTask) .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) @@ -597,6 +630,7 @@ public String toString() { .add("notebookTask", notebookTask) .add("notificationSettings", notificationSettings) .add("pipelineTask", pipelineTask) + .add("powerBiTask", powerBiTask) .add("pythonWheelTask", pythonWheelTask) .add("retryOnTimeout", retryOnTimeout) .add("runIf", runIf) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetail.java new file mode 100755 index 000000000..d40158f88 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetail.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class WidgetErrorDetail { + /** */ + @JsonProperty("message") + private String message; + + public WidgetErrorDetail setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WidgetErrorDetail that = (WidgetErrorDetail) o; + return Objects.equals(message, that.message); + } + + @Override + public int hashCode() { + return Objects.hash(message); + } + + @Override + public String toString() { + return new ToStringer(WidgetErrorDetail.class).add("message", message).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java new file mode 100755 index 000000000..7f57da157 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ArtifactCredentialInfo { + /** + * A collection of HTTP headers that should be specified when uploading to or downloading from the + * specified `signed_uri`. + */ + @JsonProperty("headers") + private Collection headers; + + /** + * The path, relative to the Run's artifact root location, of the artifact that can be accessed + * with the credential. + */ + @JsonProperty("path") + private String path; + + /** The ID of the MLflow Run containing the artifact that can be accessed with the credential. */ + @JsonProperty("run_id") + private String runId; + + /** The signed URI credential that provides access to the artifact. */ + @JsonProperty("signed_uri") + private String signedUri; + + /** + * The type of the signed credential URI (e.g., an AWS presigned URL or an Azure Shared Access + * Signature URI). 
+ */ + @JsonProperty("type") + private ArtifactCredentialType typeValue; + + public ArtifactCredentialInfo setHeaders(Collection headers) { + this.headers = headers; + return this; + } + + public Collection getHeaders() { + return headers; + } + + public ArtifactCredentialInfo setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public ArtifactCredentialInfo setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public ArtifactCredentialInfo setSignedUri(String signedUri) { + this.signedUri = signedUri; + return this; + } + + public String getSignedUri() { + return signedUri; + } + + public ArtifactCredentialInfo setType(ArtifactCredentialType typeValue) { + this.typeValue = typeValue; + return this; + } + + public ArtifactCredentialType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ArtifactCredentialInfo that = (ArtifactCredentialInfo) o; + return Objects.equals(headers, that.headers) + && Objects.equals(path, that.path) + && Objects.equals(runId, that.runId) + && Objects.equals(signedUri, that.signedUri) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(headers, path, runId, signedUri, typeValue); + } + + @Override + public String toString() { + return new ToStringer(ArtifactCredentialInfo.class) + .add("headers", headers) + .add("path", path) + .add("runId", runId) + .add("signedUri", signedUri) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java new file mode 100755 index 000000000..053a8991c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ArtifactCredentialInfoHttpHeader { + /** The HTTP header name. */ + @JsonProperty("name") + private String name; + + /** The HTTP header value. 
*/ + @JsonProperty("value") + private String value; + + public ArtifactCredentialInfoHttpHeader setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ArtifactCredentialInfoHttpHeader setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ArtifactCredentialInfoHttpHeader that = (ArtifactCredentialInfoHttpHeader) o; + return Objects.equals(name, that.name) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(name, value); + } + + @Override + public String toString() { + return new ToStringer(ArtifactCredentialInfoHttpHeader.class) + .add("name", name) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java new file mode 100755 index 000000000..ec4cf4370 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java @@ -0,0 +1,14 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; + +/** The type of a given artifact access credential */ +@Generated +public enum ArtifactCredentialType { + AWS_PRESIGNED_URL, + AZURE_ADLS_GEN2_SAS_URI, + AZURE_SAS_URI, + GCP_SIGNED_URL, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java index e1ca8823b..6810b04c4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java @@ -10,53 +10,55 @@ @Generated public class CreateForecastingExperimentRequest { - /** - * Name of the column in the input training table used to customize the weight for each time - * series to calculate weighted metrics. - */ + /** The column in the training table used to customize weights for each time series. */ @JsonProperty("custom_weights_column") private String customWeightsColumn; - /** - * The path to the created experiment. This is the path where the experiment will be stored in the - * workspace. - */ + /** The path in the workspace to store the created experiment. */ @JsonProperty("experiment_path") private String experimentPath; /** - * The granularity of the forecast. This defines the time interval between consecutive rows in the - * time series data. Possible values: '1 second', '1 minute', '5 minutes', '10 minutes', '15 - * minutes', '30 minutes', 'Hourly', 'Daily', 'Weekly', 'Monthly', 'Quarterly', 'Yearly'. + * The time interval between consecutive rows in the time series data. Possible values include: '1 + * second', '1 minute', '5 minutes', '10 minutes', '15 minutes', '30 minutes', 'Hourly', 'Daily', + * 'Weekly', 'Monthly', 'Quarterly', 'Yearly'. */ @JsonProperty("forecast_granularity") private String forecastGranularity; /** - * The number of time steps into the future for which predictions should be made. 
This value - * represents a multiple of forecast_granularity determining how far ahead the model will - * forecast. + * The number of time steps into the future to make predictions, calculated as a multiple of + * forecast_granularity. This value represents how far ahead the model should forecast. */ @JsonProperty("forecast_horizon") private Long forecastHorizon; /** - * Region code(s) to consider when automatically adding holiday features. When empty, no holiday - * features are added. Only supports 1 holiday region for now. + * The region code(s) to automatically add holiday features. Currently supports only one region. */ @JsonProperty("holiday_regions") private Collection holidayRegions; /** - * The maximum duration in minutes for which the experiment is allowed to run. If the experiment - * exceeds this time limit it will be stopped automatically. + * Specifies the list of feature columns to include in model training. These columns must exist in + * the training data and be of type string, numerical, or boolean. If not specified, no additional + * features will be included. Note: Certain columns are automatically handled: - Automatically + * excluded: split_column, target_column, custom_weights_column. - Automatically included: + * time_column. + */ + @JsonProperty("include_features") + private Collection includeFeatures; + + /** + * The maximum duration for the experiment in minutes. The experiment stops automatically if it + * exceeds this limit. */ @JsonProperty("max_runtime") private Long maxRuntime; /** - * The three-level (fully qualified) path to a unity catalog table. This table path serves to - * store the predictions. + * The fully qualified path of a Unity Catalog table, formatted as + * catalog_name.schema_name.table_name, used to store predictions. */ @JsonProperty("prediction_data_path") private String predictionDataPath; @@ -66,47 +68,47 @@ public class CreateForecastingExperimentRequest { private String primaryMetric; /** - * The three-level (fully qualified) path to a unity catalog model. This model path serves to - * store the best model. + * The fully qualified path of a Unity Catalog model, formatted as + * catalog_name.schema_name.model_name, used to store the best model. */ @JsonProperty("register_to") private String registerTo; /** - * Name of the column in the input training table used for custom data splits. The values in this - * column must be "train", "validate", or "test" to indicate which split each row belongs to. + * The column in the training table used for custom data splits. Values must be 'train', + * 'validate', or 'test'. */ @JsonProperty("split_column") private String splitColumn; /** - * Name of the column in the input training table that serves as the prediction target. The values - * in this column will be used as the ground truth for model training. + * The column in the input training table used as the prediction target for model training. The + * values in this column are used as the ground truth for model training. */ @JsonProperty("target_column") private String targetColumn; - /** Name of the column in the input training table that represents the timestamp of each row. */ + /** The column in the input training table that represents each row's timestamp. */ @JsonProperty("time_column") private String timeColumn; /** - * Name of the column in the input training table used to group the dataset to predict individual - * time series + * The column in the training table used to group the dataset for predicting individual time + * series.
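+ * <p>For orientation, a minimal request sketch (hand-written, illustrative values; not part of
+ * the generated API surface) exercising the new {@code include_features} field:
+ *
+ * <pre>{@code
+ * CreateForecastingExperimentRequest req =
+ *     new CreateForecastingExperimentRequest()
+ *         .setTrainDataPath("main.demo.sales")
+ *         .setTargetColumn("revenue")
+ *         .setTimeColumn("date")
+ *         .setForecastGranularity("Daily")
+ *         .setForecastHorizon(30L)
+ *         .setIncludeFeatures(Arrays.asList("region", "promo_flag"));
+ * }</pre>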
*/ @JsonProperty("timeseries_identifier_columns") private Collection timeseriesIdentifierColumns; /** - * The three-level (fully qualified) name of a unity catalog table. This table serves as the - * training data for the forecasting model. + * The fully qualified name of a Unity Catalog table, formatted as + * catalog_name.schema_name.table_name, used as training data for the forecasting model. */ @JsonProperty("train_data_path") private String trainDataPath; /** - * The list of frameworks to include for model tuning. Possible values: 'Prophet', 'ARIMA', - * 'DeepAR'. An empty list will include all supported frameworks. + * List of frameworks to include for model tuning. Possible values are 'Prophet', 'ARIMA', + * 'DeepAR'. An empty list includes all supported frameworks. */ @JsonProperty("training_frameworks") private Collection trainingFrameworks; @@ -156,6 +158,15 @@ public Collection getHolidayRegions() { return holidayRegions; } + public CreateForecastingExperimentRequest setIncludeFeatures(Collection includeFeatures) { + this.includeFeatures = includeFeatures; + return this; + } + + public Collection getIncludeFeatures() { + return includeFeatures; + } + public CreateForecastingExperimentRequest setMaxRuntime(Long maxRuntime) { this.maxRuntime = maxRuntime; return this; @@ -258,6 +269,7 @@ public boolean equals(Object o) { && Objects.equals(forecastGranularity, that.forecastGranularity) && Objects.equals(forecastHorizon, that.forecastHorizon) && Objects.equals(holidayRegions, that.holidayRegions) + && Objects.equals(includeFeatures, that.includeFeatures) && Objects.equals(maxRuntime, that.maxRuntime) && Objects.equals(predictionDataPath, that.predictionDataPath) && Objects.equals(primaryMetric, that.primaryMetric) @@ -278,6 +290,7 @@ public int hashCode() { forecastGranularity, forecastHorizon, holidayRegions, + includeFeatures, maxRuntime, predictionDataPath, primaryMetric, @@ -298,6 +311,7 @@ public String toString() { .add("forecastGranularity", forecastGranularity) .add("forecastHorizon", forecastHorizon) .add("holidayRegions", holidayRegions) + .add("includeFeatures", includeFeatures) .add("maxRuntime", maxRuntime) .add("predictionDataPath", predictionDataPath) .add("primaryMetric", primaryMetric) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index 65d6f355d..053dd8525 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -136,6 +136,30 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) { return impl.getByName(request); } + public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( + String requestId) { + return getCredentialsForTraceDataDownload( + new GetCredentialsForTraceDataDownloadRequest().setRequestId(requestId)); + } + + /** Get credentials to download trace data. */ + public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( + GetCredentialsForTraceDataDownloadRequest request) { + return impl.getCredentialsForTraceDataDownload(request); + } + + public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( + String requestId) { + return getCredentialsForTraceDataUpload( + new GetCredentialsForTraceDataUploadRequest().setRequestId(requestId)); + } + + /** Get credentials to upload trace data. 
*/ + public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( + GetCredentialsForTraceDataUploadRequest request) { + return impl.getCredentialsForTraceDataUpload(request); + } + public GetExperimentResponse getExperiment(String experimentId) { return getExperiment(new GetExperimentRequest().setExperimentId(experimentId)); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java index d8a23c091..0e3b7116b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java @@ -113,6 +113,38 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) { } } + @Override + public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( + GetCredentialsForTraceDataDownloadRequest request) { + String path = + String.format( + "/api/2.0/mlflow/traces/%s/credentials-for-data-download", request.getRequestId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetCredentialsForTraceDataDownloadResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( + GetCredentialsForTraceDataUploadRequest request) { + String path = + String.format( + "/api/2.0/mlflow/traces/%s/credentials-for-data-upload", request.getRequestId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetCredentialsForTraceDataUploadResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GetExperimentResponse getExperiment(GetExperimentRequest request) { String path = "/api/2.0/mlflow/experiments/get"; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index 43efb3908..ec7ca96ea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -83,6 +83,14 @@ public interface ExperimentsService { */ GetExperimentByNameResponse getByName(GetByNameRequest getByNameRequest); + /** Get credentials to download trace data. */ + GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( + GetCredentialsForTraceDataDownloadRequest getCredentialsForTraceDataDownloadRequest); + + /** Get credentials to upload trace data. */ + GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( + GetCredentialsForTraceDataUploadRequest getCredentialsForTraceDataUploadRequest); + /** * Get an experiment. 
* diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java index 03278d8e4..1b6cef6c6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java @@ -10,7 +10,7 @@ /** Metadata of a single artifact file or directory. */ @Generated public class FileInfo { - /** Size in bytes. Unset for directories. */ + /** The size in bytes of the file. Unset for directories. */ @JsonProperty("file_size") private Long fileSize; @@ -18,7 +18,7 @@ public class FileInfo { @JsonProperty("is_dir") private Boolean isDir; - /** Path relative to the root artifact directory run. */ + /** The path relative to the run's root artifact directory. */ @JsonProperty("path") private String path; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java new file mode 100755 index 000000000..42aac217e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get credentials to download trace data */ +@Generated +public class GetCredentialsForTraceDataDownloadRequest { + /** The ID of the trace to fetch artifact download credentials for. */ + @JsonIgnore private String requestId; + + public GetCredentialsForTraceDataDownloadRequest setRequestId(String requestId) { + this.requestId = requestId; + return this; + } + + public String getRequestId() { + return requestId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCredentialsForTraceDataDownloadRequest that = (GetCredentialsForTraceDataDownloadRequest) o; + return Objects.equals(requestId, that.requestId); + } + + @Override + public int hashCode() { + return Objects.hash(requestId); + } + + @Override + public String toString() { + return new ToStringer(GetCredentialsForTraceDataDownloadRequest.class) + .add("requestId", requestId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java new file mode 100755 index 000000000..839e04921 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetCredentialsForTraceDataDownloadResponse { + /** The artifact download credentials for the specified trace data.
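+ * <p>A consumption sketch (assumes Java 11+ {@code java.net.http}; exception handling elided;
+ * not part of the generated API). The signed URI and headers come straight from this credential:
+ *
+ * <pre>{@code
+ * GetCredentialsForTraceDataDownloadResponse resp =
+ *     w.experiments().getCredentialsForTraceDataDownload("tr-1234567890abcdef");
+ * ArtifactCredentialInfo cred = resp.getCredentialInfo();
+ * HttpRequest.Builder builder = HttpRequest.newBuilder(URI.create(cred.getSignedUri()));
+ * for (ArtifactCredentialInfoHttpHeader h : cred.getHeaders()) {
+ *   builder.header(h.getName(), h.getValue()); // each header is a name/value pair
+ * }
+ * HttpResponse<byte[]> trace =
+ *     HttpClient.newHttpClient()
+ *         .send(builder.GET().build(), HttpResponse.BodyHandlers.ofByteArray());
+ * }</pre>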
*/ + @JsonProperty("credential_info") + private ArtifactCredentialInfo credentialInfo; + + public GetCredentialsForTraceDataDownloadResponse setCredentialInfo( + ArtifactCredentialInfo credentialInfo) { + this.credentialInfo = credentialInfo; + return this; + } + + public ArtifactCredentialInfo getCredentialInfo() { + return credentialInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCredentialsForTraceDataDownloadResponse that = + (GetCredentialsForTraceDataDownloadResponse) o; + return Objects.equals(credentialInfo, that.credentialInfo); + } + + @Override + public int hashCode() { + return Objects.hash(credentialInfo); + } + + @Override + public String toString() { + return new ToStringer(GetCredentialsForTraceDataDownloadResponse.class) + .add("credentialInfo", credentialInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java new file mode 100755 index 000000000..e7c6d452c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get credentials to upload trace data */ +@Generated +public class GetCredentialsForTraceDataUploadRequest { + /** The ID of the trace to fetch artifact upload credentials for. */ + @JsonIgnore private String requestId; + + public GetCredentialsForTraceDataUploadRequest setRequestId(String requestId) { + this.requestId = requestId; + return this; + } + + public String getRequestId() { + return requestId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCredentialsForTraceDataUploadRequest that = (GetCredentialsForTraceDataUploadRequest) o; + return Objects.equals(requestId, that.requestId); + } + + @Override + public int hashCode() { + return Objects.hash(requestId); + } + + @Override + public String toString() { + return new ToStringer(GetCredentialsForTraceDataUploadRequest.class) + .add("requestId", requestId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java new file mode 100755 index 000000000..9dcaed06c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetCredentialsForTraceDataUploadResponse { + /** The artifact upload credentials for the specified trace data. 
*/ + @JsonProperty("credential_info") + private ArtifactCredentialInfo credentialInfo; + + public GetCredentialsForTraceDataUploadResponse setCredentialInfo( + ArtifactCredentialInfo credentialInfo) { + this.credentialInfo = credentialInfo; + return this; + } + + public ArtifactCredentialInfo getCredentialInfo() { + return credentialInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCredentialsForTraceDataUploadResponse that = (GetCredentialsForTraceDataUploadResponse) o; + return Objects.equals(credentialInfo, that.credentialInfo); + } + + @Override + public int hashCode() { + return Objects.hash(credentialInfo); + } + + @Override + public String toString() { + return new ToStringer(GetCredentialsForTraceDataUploadResponse.class) + .add("credentialInfo", credentialInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java index d7a68b8ed..d1211a376 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java @@ -12,7 +12,7 @@ @Generated public class ListArtifactsRequest { /** - * Token indicating the page of artifact results to fetch. `page_token` is not supported when + * The token indicating the page of artifact results to fetch. `page_token` is not supported when * listing artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. * Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, * which supports pagination. See [List directory contents | Files diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java index 008168efc..dac42941b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java @@ -10,15 +10,15 @@ @Generated public class ListArtifactsResponse { - /** File location and metadata for artifacts. */ + /** The file location and metadata for artifacts. */ @JsonProperty("files") private Collection files; - /** Token that can be used to retrieve the next page of artifact results */ + /** The token that can be used to retrieve the next page of artifact results. */ @JsonProperty("next_page_token") private String nextPageToken; - /** Root artifact directory for the run. */ + /** The root artifact directory for the run. 
*/ @JsonProperty("root_uri") private String rootUri; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputs.java index 4f1496809..9ae8ed0bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputs.java @@ -14,6 +14,10 @@ public class LogInputs { @JsonProperty("datasets") private Collection datasets; + /** Model inputs */ + @JsonProperty("models") + private Collection models; + /** ID of the run to log under */ @JsonProperty("run_id") private String runId; @@ -27,6 +31,15 @@ public Collection getDatasets() { return datasets; } + public LogInputs setModels(Collection models) { + this.models = models; + return this; + } + + public Collection getModels() { + return models; + } + public LogInputs setRunId(String runId) { this.runId = runId; return this; @@ -41,16 +54,22 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; LogInputs that = (LogInputs) o; - return Objects.equals(datasets, that.datasets) && Objects.equals(runId, that.runId); + return Objects.equals(datasets, that.datasets) + && Objects.equals(models, that.models) + && Objects.equals(runId, that.runId); } @Override public int hashCode() { - return Objects.hash(datasets, runId); + return Objects.hash(datasets, models, runId); } @Override public String toString() { - return new ToStringer(LogInputs.class).add("datasets", datasets).add("runId", runId).toString(); + return new ToStringer(LogInputs.class) + .add("datasets", datasets) + .add("models", models) + .add("runId", runId) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java index 5182201be..7e621f079 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java @@ -9,10 +9,28 @@ @Generated public class LogMetric { + /** + * Dataset digest of the dataset associated with the metric, e.g. an md5 hash of the dataset that + * uniquely identifies it within datasets of the same name. + */ + @JsonProperty("dataset_digest") + private String datasetDigest; + + /** + * The name of the dataset associated with the metric. E.g. “my.uc.table@2” “nyc-taxi-dataset”, + * “fantastic-elk-3” + */ + @JsonProperty("dataset_name") + private String datasetName; + /** Name of the metric. */ @JsonProperty("key") private String key; + /** ID of the logged model associated with the metric, if applicable */ + @JsonProperty("model_id") + private String modelId; + /** ID of the run under which to log the metric. Must be provided. 
*/ @JsonProperty("run_id") private String runId; @@ -36,6 +54,24 @@ public class LogMetric { @JsonProperty("value") private Double value; + public LogMetric setDatasetDigest(String datasetDigest) { + this.datasetDigest = datasetDigest; + return this; + } + + public String getDatasetDigest() { + return datasetDigest; + } + + public LogMetric setDatasetName(String datasetName) { + this.datasetName = datasetName; + return this; + } + + public String getDatasetName() { + return datasetName; + } + public LogMetric setKey(String key) { this.key = key; return this; @@ -45,6 +81,15 @@ public String getKey() { return key; } + public LogMetric setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + public LogMetric setRunId(String runId) { this.runId = runId; return this; @@ -95,7 +140,10 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; LogMetric that = (LogMetric) o; - return Objects.equals(key, that.key) + return Objects.equals(datasetDigest, that.datasetDigest) + && Objects.equals(datasetName, that.datasetName) + && Objects.equals(key, that.key) + && Objects.equals(modelId, that.modelId) && Objects.equals(runId, that.runId) && Objects.equals(runUuid, that.runUuid) && Objects.equals(step, that.step) @@ -105,13 +153,17 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(key, runId, runUuid, step, timestamp, value); + return Objects.hash( + datasetDigest, datasetName, key, modelId, runId, runUuid, step, timestamp, value); } @Override public String toString() { return new ToStringer(LogMetric.class) + .add("datasetDigest", datasetDigest) + .add("datasetName", datasetName) .add("key", key) + .add("modelId", modelId) .add("runId", runId) .add("runUuid", runUuid) .add("step", step) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java index e4bcb3557..62e7340d9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java @@ -10,22 +10,65 @@ /** Metric associated with a run, represented as a key-value pair. */ @Generated public class Metric { - /** Key identifying this metric. */ + /** + * The dataset digest of the dataset associated with the metric, e.g. an md5 hash of the dataset + * that uniquely identifies it within datasets of the same name. + */ + @JsonProperty("dataset_digest") + private String datasetDigest; + + /** + * The name of the dataset associated with the metric. E.g. “my.uc.table@2” “nyc-taxi-dataset”, + * “fantastic-elk-3” + */ + @JsonProperty("dataset_name") + private String datasetName; + + /** The key identifying the metric. */ @JsonProperty("key") private String key; - /** Step at which to log the metric. */ + /** + * The ID of the logged model or registered model version associated with the metric, if + * applicable. + */ + @JsonProperty("model_id") + private String modelId; + + /** The ID of the run containing the metric. */ + @JsonProperty("run_id") + private String runId; + + /** The step at which the metric was logged. */ @JsonProperty("step") private Long step; - /** The timestamp at which this metric was recorded. */ + /** The timestamp at which the metric was recorded. */ @JsonProperty("timestamp") private Long timestamp; - /** Value associated with this metric. 
*/ + /** The value of the metric. */ @JsonProperty("value") private Double value; + public Metric setDatasetDigest(String datasetDigest) { + this.datasetDigest = datasetDigest; + return this; + } + + public String getDatasetDigest() { + return datasetDigest; + } + + public Metric setDatasetName(String datasetName) { + this.datasetName = datasetName; + return this; + } + + public String getDatasetName() { + return datasetName; + } + public Metric setKey(String key) { this.key = key; return this; @@ -35,6 +78,24 @@ public String getKey() { return key; } + public Metric setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + public Metric setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + public Metric setStep(Long step) { this.step = step; return this; @@ -67,7 +128,11 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Metric that = (Metric) o; - return Objects.equals(key, that.key) + return Objects.equals(datasetDigest, that.datasetDigest) + && Objects.equals(datasetName, that.datasetName) + && Objects.equals(key, that.key) + && Objects.equals(modelId, that.modelId) + && Objects.equals(runId, that.runId) && Objects.equals(step, that.step) && Objects.equals(timestamp, that.timestamp) && Objects.equals(value, that.value); @@ -75,13 +140,17 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(key, step, timestamp, value); + return Objects.hash(datasetDigest, datasetName, key, modelId, runId, step, timestamp, value); } @Override public String toString() { return new ToStringer(Metric.class) + .add("datasetDigest", datasetDigest) + .add("datasetName", datasetName) .add("key", key) + .add("modelId", modelId) + .add("runId", runId) .add("step", step) .add("timestamp", timestamp) .add("value", value) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInput.java new file mode 100755 index 000000000..be6cc4713 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelInput.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents a LoggedModel or Registered Model Version input to a Run. */ +@Generated +public class ModelInput { + /** The unique identifier of the model. 
*/ + @JsonProperty("model_id") + private String modelId; + + public ModelInput setModelId(String modelId) { + this.modelId = modelId; + return this; + } + + public String getModelId() { + return modelId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ModelInput that = (ModelInput) o; + return Objects.equals(modelId, that.modelId); + } + + @Override + public int hashCode() { + return Objects.hash(modelId); + } + + @Override + public String toString() { + return new ToStringer(ModelInput.class).add("modelId", modelId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java index b90b79df7..604f034f7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java @@ -15,6 +15,15 @@ public class RunInputs { @JsonProperty("dataset_inputs") private Collection datasetInputs; + /** + * **NOTE**: Experimental: This API field may change or be removed in a future release without + * warning. + * + *

Model inputs to the Run. + */ + @JsonProperty("model_inputs") + private Collection modelInputs; + public RunInputs setDatasetInputs(Collection datasetInputs) { this.datasetInputs = datasetInputs; return this; @@ -24,21 +33,34 @@ public Collection getDatasetInputs() { return datasetInputs; } + public RunInputs setModelInputs(Collection modelInputs) { + this.modelInputs = modelInputs; + return this; + } + + public Collection getModelInputs() { + return modelInputs; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RunInputs that = (RunInputs) o; - return Objects.equals(datasetInputs, that.datasetInputs); + return Objects.equals(datasetInputs, that.datasetInputs) + && Objects.equals(modelInputs, that.modelInputs); } @Override public int hashCode() { - return Objects.hash(datasetInputs); + return Objects.hash(datasetInputs, modelInputs); } @Override public String toString() { - return new ToStringer(RunInputs.class).add("datasetInputs", datasetInputs).toString(); + return new ToStringer(RunInputs.class) + .add("datasetInputs", datasetInputs) + .add("modelInputs", modelInputs) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java index 92117d392..aede0bea6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java @@ -44,51 +44,6 @@ public PipelinesAPI(PipelinesService mock) { impl = mock; } - public GetPipelineResponse waitGetPipelineRunning(String pipelineId) throws TimeoutException { - return waitGetPipelineRunning(pipelineId, Duration.ofMinutes(20), null); - } - - public GetPipelineResponse waitGetPipelineRunning( - String pipelineId, Duration timeout, Consumer callback) - throws TimeoutException { - long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List targetStates = Arrays.asList(PipelineState.RUNNING); - java.util.List failureStates = Arrays.asList(PipelineState.FAILED); - String statusMessage = "polling..."; - int attempt = 1; - while (System.currentTimeMillis() < deadline) { - GetPipelineResponse poll = get(new GetPipelineRequest().setPipelineId(pipelineId)); - PipelineState status = poll.getState(); - statusMessage = poll.getCause(); - if (targetStates.contains(status)) { - return poll; - } - if (callback != null) { - callback.accept(poll); - } - if (failureStates.contains(status)) { - String msg = String.format("failed to reach RUNNING, got %s: %s", status, statusMessage); - throw new IllegalStateException(msg); - } - - String prefix = String.format("pipelineId=%s", pipelineId); - int sleep = attempt; - if (sleep > 10) { - // sleep 10s max per attempt - sleep = 10; - } - LOG.info("{}: ({}) {} (sleeping ~{}s)", prefix, status, statusMessage, sleep); - try { - Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new DatabricksException("Current thread was interrupted", e); - } - attempt++; - } - throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); - } - public GetPipelineResponse waitGetPipelineIdle(String pipelineId) throws TimeoutException { return waitGetPipelineIdle(pipelineId, Duration.ofMinutes(20), null); } diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java index 1e3ed8301..b02ca6ab8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java @@ -11,8 +11,9 @@ @Generated public class CreateServingEndpoint { /** - * The AI Gateway configuration for the serving endpoint. NOTE: Only external model and - * provisioned throughput endpoints are currently supported. + * The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned + * throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only + * support inference tables. */ @JsonProperty("ai_gateway") private AiGatewayConfig aiGateway; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java index d213e6dcb..9a0da78e1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java @@ -11,8 +11,9 @@ @Generated public class ServingEndpoint { /** - * The AI Gateway configuration for the serving endpoint. NOTE: Only external model and - * provisioned throughput endpoints are currently supported. + * The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned + * throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only + * support inference tables. */ @JsonProperty("ai_gateway") private AiGatewayConfig aiGateway; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java index 2169edd4a..b01c6e572 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java @@ -11,8 +11,9 @@ @Generated public class ServingEndpointDetailed { /** - * The AI Gateway configuration for the serving endpoint. NOTE: Only external model and - * provisioned throughput endpoints are currently supported. + * The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned + * throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only + * support inference tables. */ @JsonProperty("ai_gateway") private AiGatewayConfig aiGateway; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java index 3e7e3347c..f044a180f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java @@ -251,8 +251,7 @@ public PutResponse put(String name) { /** * Update rate limits of a serving endpoint. * - *

Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints - * are currently supported. For external models, use AI Gateway to manage rate limits. + *

Deprecated: Please use AI Gateway to manage rate limits instead. */ public PutResponse put(PutRequest request) { return impl.put(request); @@ -265,8 +264,9 @@ public PutAiGatewayResponse putAiGateway(String name) { /** * Update AI Gateway of a serving endpoint. * - *

Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and - * provisioned throughput endpoints are currently supported. + *

Used to update the AI Gateway of a serving endpoint. NOTE: External model, provisioned + * throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only + * support inference tables. */ public PutAiGatewayResponse putAiGateway(PutAiGatewayRequest request) { return impl.putAiGateway(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java index a6c95cedd..42ee3b314 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java @@ -97,16 +97,16 @@ ServingEndpointPermissions getPermissions( /** * Update rate limits of a serving endpoint. * - *

Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints - * are currently supported. For external models, use AI Gateway to manage rate limits. + *

Deprecated: Please use AI Gateway to manage rate limits instead. */ PutResponse put(PutRequest putRequest); /** * Update AI Gateway of a serving endpoint. * - *

Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and - * provisioned throughput endpoints are currently supported. + *

Used to update the AI Gateway of a serving endpoint. NOTE: External model, provisioned + * throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only + * support inference tables. */ PutAiGatewayResponse putAiGateway(PutAiGatewayRequest putAiGatewayRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java index 673a7b951..14651fa45 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java @@ -9,10 +9,9 @@ /** * 'Disabling legacy access' has the following impacts: * - *

1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore - * through HMS Federation. 2. Disables Fallback Mode (docs link) on any External Location access - * from the workspace. 3. Alters DBFS path access to use External Location permissions in place of - * legacy credentials. 4. Enforces Unity Catalog access on all path based access. + *

1. Disables direct access to Hive Metastores from the workspace. However, you can still access + a Hive Metastore through Hive Metastore federation. 2. Disables fallback mode on external + location access from the workspace. 3. Disables Databricks Runtime versions prior to 13.3 LTS. + */ @Generated public class DisableLegacyAccessAPI { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessService.java index 60025ae59..a6b09e7c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessService.java @@ -6,10 +6,9 @@ /** * 'Disabling legacy access' has the following impacts: * -

1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore - * through HMS Federation. 2. Disables Fallback Mode (docs link) on any External Location access - * from the workspace. 3. Alters DBFS path access to use External Location permissions in place of - * legacy credentials. 4. Enforces Unity Catalog access on all path based access. + *

1. Disables direct access to Hive Metastores from the workspace. However, you can still access + a Hive Metastore through Hive Metastore federation. 2. Disables fallback mode on external + location access from the workspace. 3. Disables Databricks Runtime versions prior to 13.3 LTS. * *

This is the high-level interface that contains generated methods. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebook.java new file mode 100755 index 000000000..615acaa97 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebook.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class EnableExportNotebook { + /** */ + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + /** + * Name of the corresponding setting. This field is populated in the response, but it will not be + * respected even if it's set in the request body. The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. + */ + @JsonProperty("setting_name") + private String settingName; + + public EnableExportNotebook setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public EnableExportNotebook setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnableExportNotebook that = (EnableExportNotebook) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, settingName); + } + + @Override + public String toString() { + return new ToStringer(EnableExportNotebook.class) + .add("booleanVal", booleanVal) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookAPI.java new file mode 100755 index 000000000..09fb0fb49 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookAPI.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Controls whether users can export notebooks and files from the Workspace. By default, this + * setting is enabled.
+ */ +@Generated +public class EnableExportNotebookAPI { + private static final Logger LOG = LoggerFactory.getLogger(EnableExportNotebookAPI.class); + + private final EnableExportNotebookService impl; + + /** Regular-use constructor */ + public EnableExportNotebookAPI(ApiClient apiClient) { + impl = new EnableExportNotebookImpl(apiClient); + } + + /** Constructor for mocks */ + public EnableExportNotebookAPI(EnableExportNotebookService mock) { + impl = mock; + } + + /** + * Get the Enable Export Notebook setting. + * + *

Gets the Enable Export Notebook setting. + */ + public EnableExportNotebook getEnableExportNotebook() { + return impl.getEnableExportNotebook(); + } + + public EnableExportNotebook patchEnableExportNotebook( + boolean allowMissing, EnableExportNotebook setting, String fieldMask) { + return patchEnableExportNotebook( + new UpdateEnableExportNotebookRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the Enable Export Notebook setting. + * + *
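+ * <p>For example, disabling notebook export workspace-wide (a sketch; the {@code settings()}
+ * accessor path on {@code WorkspaceClient} and the field-mask value are assumptions, not
+ * confirmed by this change):
+ *
+ * <pre>{@code
+ * EnableExportNotebook setting =
+ *     new EnableExportNotebook().setBooleanVal(new BooleanMessage().setValue(false));
+ * w.settings().enableExportNotebook().patchEnableExportNotebook(true, setting, "boolean_val.value");
+ * }</pre>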

Updates the Enable Export Notebook setting. The model follows eventual consistency, which + * means the get after the update operation might receive stale values for some time. + */ + public EnableExportNotebook patchEnableExportNotebook(UpdateEnableExportNotebookRequest request) { + return impl.patchEnableExportNotebook(request); + } + + public EnableExportNotebookService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookImpl.java new file mode 100755 index 000000000..31eee1129 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookImpl.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of EnableExportNotebook */ +@Generated +class EnableExportNotebookImpl implements EnableExportNotebookService { + private final ApiClient apiClient; + + public EnableExportNotebookImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public EnableExportNotebook getEnableExportNotebook() { + String path = "/api/2.0/settings/types/enable-export-notebook/names/default"; + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, EnableExportNotebook.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public EnableExportNotebook patchEnableExportNotebook(UpdateEnableExportNotebookRequest request) { + String path = "/api/2.0/settings/types/enable-export-notebook/names/default"; + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, EnableExportNotebook.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookService.java new file mode 100755 index 000000000..389a99032 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookService.java @@ -0,0 +1,31 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Controls whether users can export notebooks and files from the Workspace. By default, this + * setting is enabled. + * + *

This is the high-level interface that contains generated methods. + * + *
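For illustration, a hypothetical round trip through the generated EnableExportNotebookAPI
+ * wrapper around this service (a sketch, not part of the generated code: it assumes a configured
+ * {@code WorkspaceClient} and the package's existing {@code BooleanMessage} helper, and the
+ * literal values are placeholders):
+ *
+ * <pre>{@code
+ * WorkspaceClient w = new WorkspaceClient();
+ * // Read the current setting.
+ * EnableExportNotebook current = w.settings().EnableExportNotebook().getEnableExportNotebook();
+ * // Disable the setting via the convenience overload.
+ * w.settings()
+ *     .EnableExportNotebook()
+ *     .patchEnableExportNotebook(
+ *         true, // allow_missing: expected to be true for the Settings API
+ *         new EnableExportNotebook().setBooleanVal(new BooleanMessage().setValue(false)),
+ *         "boolean_val"); // field mask naming the single field being updated
+ * }</pre>
+ * + *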

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface EnableExportNotebookService { + /** + * Get the Enable Export Notebook setting. + * + *

Gets the Enable Export Notebook setting. + */ + EnableExportNotebook getEnableExportNotebook(); + + /** + * Update the Enable Export Notebook setting. + * + *

Updates the Enable Export Notebook setting. The model follows eventual consistency, which + * means the get after the update operation might receive stale values for some time. + */ + EnableExportNotebook patchEnableExportNotebook( + UpdateEnableExportNotebookRequest updateEnableExportNotebookRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboard.java new file mode 100755 index 000000000..db002609e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboard.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class EnableNotebookTableClipboard { + /** */ + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + /** + * Name of the corresponding setting. This field is populated in the response, but it will not be + * respected even if it's set in the request body. The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. + */ + @JsonProperty("setting_name") + private String settingName; + + public EnableNotebookTableClipboard setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public EnableNotebookTableClipboard setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnableNotebookTableClipboard that = (EnableNotebookTableClipboard) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, settingName); + } + + @Override + public String toString() { + return new ToStringer(EnableNotebookTableClipboard.class) + .add("booleanVal", booleanVal) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardAPI.java new file mode 100755 index 000000000..209db42b8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardAPI.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Controls whether users can copy tabular data to the clipboard via the UI. By default, this + * setting is enabled. 
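+ *
+ * <p>A minimal usage sketch (hypothetical, not generated; assumes a configured
+ * {@code WorkspaceClient}):
+ *
+ * <pre>{@code
+ * WorkspaceClient w = new WorkspaceClient();
+ * EnableNotebookTableClipboard current =
+ *     w.settings().EnableNotebookTableClipboard().getEnableNotebookTableClipboard();
+ * }</pre>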
+ */ +@Generated +public class EnableNotebookTableClipboardAPI { + private static final Logger LOG = LoggerFactory.getLogger(EnableNotebookTableClipboardAPI.class); + + private final EnableNotebookTableClipboardService impl; + + /** Regular-use constructor */ + public EnableNotebookTableClipboardAPI(ApiClient apiClient) { + impl = new EnableNotebookTableClipboardImpl(apiClient); + } + + /** Constructor for mocks */ + public EnableNotebookTableClipboardAPI(EnableNotebookTableClipboardService mock) { + impl = mock; + } + + /** + * Get the Enable Notebook Table Clipboard setting. + * + *

Gets the Enable Notebook Table Clipboard setting. + */ + public EnableNotebookTableClipboard getEnableNotebookTableClipboard() { + return impl.getEnableNotebookTableClipboard(); + } + + public EnableNotebookTableClipboard patchEnableNotebookTableClipboard( + boolean allowMissing, EnableNotebookTableClipboard setting, String fieldMask) { + return patchEnableNotebookTableClipboard( + new UpdateEnableNotebookTableClipboardRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the Enable Notebook Table Clipboard setting. + * + *

Updates the Enable Notebook Table Clipboard setting. The model follows eventual consistency, + * which means the get after the update operation might receive stale values for some time. + */ + public EnableNotebookTableClipboard patchEnableNotebookTableClipboard( + UpdateEnableNotebookTableClipboardRequest request) { + return impl.patchEnableNotebookTableClipboard(request); + } + + public EnableNotebookTableClipboardService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardImpl.java new file mode 100755 index 000000000..bcff25338 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardImpl.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of EnableNotebookTableClipboard */ +@Generated +class EnableNotebookTableClipboardImpl implements EnableNotebookTableClipboardService { + private final ApiClient apiClient; + + public EnableNotebookTableClipboardImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public EnableNotebookTableClipboard getEnableNotebookTableClipboard() { + String path = "/api/2.0/settings/types/enable-notebook-table-clipboard/names/default"; + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, EnableNotebookTableClipboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public EnableNotebookTableClipboard patchEnableNotebookTableClipboard( + UpdateEnableNotebookTableClipboardRequest request) { + String path = "/api/2.0/settings/types/enable-notebook-table-clipboard/names/default"; + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, EnableNotebookTableClipboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardService.java new file mode 100755 index 000000000..73bdd8051 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableNotebookTableClipboardService.java @@ -0,0 +1,31 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Controls whether users can copy tabular data to the clipboard via the UI. By default, this + * setting is enabled. + * + *

This is the high-level interface that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface EnableNotebookTableClipboardService { + /** + * Get the Enable Notebook Table Clipboard setting. + * + *

Gets the Enable Notebook Table Clipboard setting. + */ + EnableNotebookTableClipboard getEnableNotebookTableClipboard(); + + /** + * Update the Enable Notebook Table Clipboard setting. + * + *

Updates the Enable Notebook Table Clipboard setting. The model follows eventual consistency, + * which means the get after the update operation might receive stale values for some time. + */ + EnableNotebookTableClipboard patchEnableNotebookTableClipboard( + UpdateEnableNotebookTableClipboardRequest updateEnableNotebookTableClipboardRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloading.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloading.java new file mode 100755 index 000000000..990d5b733 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloading.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class EnableResultsDownloading { + /** */ + @JsonProperty("boolean_val") + private BooleanMessage booleanVal; + + /** + * Name of the corresponding setting. This field is populated in the response, but it will not be + * respected even if it's set in the request body. The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. + */ + @JsonProperty("setting_name") + private String settingName; + + public EnableResultsDownloading setBooleanVal(BooleanMessage booleanVal) { + this.booleanVal = booleanVal; + return this; + } + + public BooleanMessage getBooleanVal() { + return booleanVal; + } + + public EnableResultsDownloading setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnableResultsDownloading that = (EnableResultsDownloading) o; + return Objects.equals(booleanVal, that.booleanVal) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(booleanVal, settingName); + } + + @Override + public String toString() { + return new ToStringer(EnableResultsDownloading.class) + .add("booleanVal", booleanVal) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingAPI.java new file mode 100755 index 000000000..cf27e858d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingAPI.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Controls whether users can download notebook results. By default, this setting is enabled. 
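+ * A minimal usage sketch (hypothetical, not generated; assumes a configured
+ * {@code WorkspaceClient} and the package's {@code BooleanMessage} helper) that disables the
+ * setting via the patch overload generated below:
+ *
+ * <pre>{@code
+ * WorkspaceClient w = new WorkspaceClient();
+ * w.settings()
+ *     .EnableResultsDownloading()
+ *     .patchEnableResultsDownloading(
+ *         true, // allow_missing
+ *         new EnableResultsDownloading().setBooleanVal(new BooleanMessage().setValue(false)),
+ *         "boolean_val"); // field mask
+ * }</pre>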
*/ +@Generated +public class EnableResultsDownloadingAPI { + private static final Logger LOG = LoggerFactory.getLogger(EnableResultsDownloadingAPI.class); + + private final EnableResultsDownloadingService impl; + + /** Regular-use constructor */ + public EnableResultsDownloadingAPI(ApiClient apiClient) { + impl = new EnableResultsDownloadingImpl(apiClient); + } + + /** Constructor for mocks */ + public EnableResultsDownloadingAPI(EnableResultsDownloadingService mock) { + impl = mock; + } + + /** + * Get the Enable Results Downloading setting. + * + *

Gets the Enable Results Downloading setting. + */ + public EnableResultsDownloading getEnableResultsDownloading() { + return impl.getEnableResultsDownloading(); + } + + public EnableResultsDownloading patchEnableResultsDownloading( + boolean allowMissing, EnableResultsDownloading setting, String fieldMask) { + return patchEnableResultsDownloading( + new UpdateEnableResultsDownloadingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the Enable Results Downloading setting. + * + *

Updates the Enable Results Downloading setting. The model follows eventual consistency, + * which means the get after the update operation might receive stale values for some time. + */ + public EnableResultsDownloading patchEnableResultsDownloading( + UpdateEnableResultsDownloadingRequest request) { + return impl.patchEnableResultsDownloading(request); + } + + public EnableResultsDownloadingService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingImpl.java new file mode 100755 index 000000000..8632b82dc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingImpl.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of EnableResultsDownloading */ +@Generated +class EnableResultsDownloadingImpl implements EnableResultsDownloadingService { + private final ApiClient apiClient; + + public EnableResultsDownloadingImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public EnableResultsDownloading getEnableResultsDownloading() { + String path = "/api/2.0/settings/types/enable-results-downloading/names/default"; + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, EnableResultsDownloading.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public EnableResultsDownloading patchEnableResultsDownloading( + UpdateEnableResultsDownloadingRequest request) { + String path = "/api/2.0/settings/types/enable-results-downloading/names/default"; + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, EnableResultsDownloading.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingService.java new file mode 100755 index 000000000..7cf41fdaf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableResultsDownloadingService.java @@ -0,0 +1,30 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Controls whether users can download notebook results. By default, this setting is enabled. + * + *

This is the high-level interface that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface EnableResultsDownloadingService { + /** + * Get the Enable Results Downloading setting. + * + *

Gets the Enable Results Downloading setting. + */ + EnableResultsDownloading getEnableResultsDownloading(); + + /** + * Update the Enable Results Downloading setting. + * + *

Updates the Enable Results Downloading setting. The model follows eventual consistency, + * which means the get after the update operation might receive stale values for some time. + */ + EnableResultsDownloading patchEnableResultsDownloading( + UpdateEnableResultsDownloadingRequest updateEnableResultsDownloadingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java index f1ba301de..9d67e7a5c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java @@ -27,6 +27,12 @@ public class SettingsAPI { private DisableLegacyDbfsAPI disableLegacyDbfsAPI; + private EnableExportNotebookAPI enableExportNotebookAPI; + + private EnableNotebookTableClipboardAPI enableNotebookTableClipboardAPI; + + private EnableResultsDownloadingAPI enableResultsDownloadingAPI; + private EnhancedSecurityMonitoringAPI enhancedSecurityMonitoringAPI; private RestrictWorkspaceAdminsAPI restrictWorkspaceAdminsAPI; @@ -50,6 +56,12 @@ public SettingsAPI(ApiClient apiClient) { disableLegacyDbfsAPI = new DisableLegacyDbfsAPI(apiClient); + enableExportNotebookAPI = new EnableExportNotebookAPI(apiClient); + + enableNotebookTableClipboardAPI = new EnableNotebookTableClipboardAPI(apiClient); + + enableResultsDownloadingAPI = new EnableResultsDownloadingAPI(apiClient); + enhancedSecurityMonitoringAPI = new EnhancedSecurityMonitoringAPI(apiClient); restrictWorkspaceAdminsAPI = new RestrictWorkspaceAdminsAPI(apiClient); @@ -104,6 +116,21 @@ public DisableLegacyDbfsAPI DisableLegacyDbfs() { return disableLegacyDbfsAPI; } + /** Controls whether users can export notebooks and files from the Workspace. */ + public EnableExportNotebookAPI EnableExportNotebook() { + return enableExportNotebookAPI; + } + + /** Controls whether users can copy tabular data to the clipboard via the UI. */ + public EnableNotebookTableClipboardAPI EnableNotebookTableClipboard() { + return enableNotebookTableClipboardAPI; + } + + /** Controls whether users can download notebook results. */ + public EnableResultsDownloadingAPI EnableResultsDownloading() { + return enableResultsDownloadingAPI; + } + /** Controls whether enhanced security monitoring is enabled for the current workspace. */ public EnhancedSecurityMonitoringAPI EnhancedSecurityMonitoring() { return enhancedSecurityMonitoringAPI; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableExportNotebookRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableExportNotebookRequest.java new file mode 100755 index 000000000..4e426ca38 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableExportNotebookRequest.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +public class UpdateEnableExportNotebookRequest { + /** This should always be set to true for Settings API. Added for AIP compliance. 
*/ + @JsonProperty("allow_missing") + private Boolean allowMissing; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *
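<p>For the boolean settings in this package the updatable value is exposed as
+ * {@code boolean_val}, so a typical mask is the single string {@code "boolean_val"}
+ * (an illustrative value, not an exhaustive list).
+ * + *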

A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonProperty("field_mask") + private String fieldMask; + + /** */ + @JsonProperty("setting") + private EnableExportNotebook setting; + + public UpdateEnableExportNotebookRequest setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateEnableExportNotebookRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateEnableExportNotebookRequest setSetting(EnableExportNotebook setting) { + this.setting = setting; + return this; + } + + public EnableExportNotebook getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateEnableExportNotebookRequest that = (UpdateEnableExportNotebookRequest) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateEnableExportNotebookRequest.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableNotebookTableClipboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableNotebookTableClipboardRequest.java new file mode 100755 index 000000000..008b93554 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableNotebookTableClipboardRequest.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +public class UpdateEnableNotebookTableClipboardRequest { + /** This should always be set to true for Settings API. Added for AIP compliance. */ + @JsonProperty("allow_missing") + private Boolean allowMissing; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *

A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonProperty("field_mask") + private String fieldMask; + + /** */ + @JsonProperty("setting") + private EnableNotebookTableClipboard setting; + + public UpdateEnableNotebookTableClipboardRequest setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateEnableNotebookTableClipboardRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateEnableNotebookTableClipboardRequest setSetting( + EnableNotebookTableClipboard setting) { + this.setting = setting; + return this; + } + + public EnableNotebookTableClipboard getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateEnableNotebookTableClipboardRequest that = (UpdateEnableNotebookTableClipboardRequest) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateEnableNotebookTableClipboardRequest.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableResultsDownloadingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableResultsDownloadingRequest.java new file mode 100755 index 000000000..7311e8398 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnableResultsDownloadingRequest.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +public class UpdateEnableResultsDownloadingRequest { + /** This should always be set to true for Settings API. Added for AIP compliance. */ + @JsonProperty("allow_missing") + private Boolean allowMissing; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *

A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonProperty("field_mask") + private String fieldMask; + + /** */ + @JsonProperty("setting") + private EnableResultsDownloading setting; + + public UpdateEnableResultsDownloadingRequest setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateEnableResultsDownloadingRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateEnableResultsDownloadingRequest setSetting(EnableResultsDownloading setting) { + this.setting = setting; + return this; + } + + public EnableResultsDownloading getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateEnableResultsDownloadingRequest that = (UpdateEnableResultsDownloadingRequest) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateEnableResultsDownloadingRequest.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java index 86de0c875..da10c9e75 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java @@ -13,6 +13,14 @@ public class QueryInfo { @JsonProperty("channel_used") private ChannelInfo channelUsed; + /** + * Client application that ran the statement. For example: Databricks SQL Editor, Tableau, and + * Power BI. This field is derived from information provided by client applications. While values + * are expected to remain static over time, this cannot be guaranteed. + */ + @JsonProperty("client_application") + private String clientApplication; + /** Total execution time of the statement ( excluding result fetch time ). 
*/ @JsonProperty("duration") private Long duration; @@ -120,6 +128,15 @@ public ChannelInfo getChannelUsed() { return channelUsed; } + public QueryInfo setClientApplication(String clientApplication) { + this.clientApplication = clientApplication; + return this; + } + + public String getClientApplication() { + return clientApplication; + } + public QueryInfo setDuration(Long duration) { this.duration = duration; return this; @@ -324,6 +341,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; QueryInfo that = (QueryInfo) o; return Objects.equals(channelUsed, that.channelUsed) + && Objects.equals(clientApplication, that.clientApplication) && Objects.equals(duration, that.duration) && Objects.equals(endpointId, that.endpointId) && Objects.equals(errorMessage, that.errorMessage) @@ -352,6 +370,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( channelUsed, + clientApplication, duration, endpointId, errorMessage, @@ -380,6 +399,7 @@ public int hashCode() { public String toString() { return new ToStringer(QueryInfo.class) .add("channelUsed", channelUsed) + .add("clientApplication", clientApplication) .add("duration", duration) .add("endpointId", endpointId) .add("errorMessage", errorMessage) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionLevel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionLevel.java index 18197e129..ad49b4477 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionLevel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionLevel.java @@ -10,5 +10,6 @@ public enum WarehousePermissionLevel { CAN_MANAGE, CAN_MONITOR, CAN_USE, + CAN_VIEW, IS_OWNER, }