diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 6854c7f28..2eda20607 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -e1ea3f5ba0bc5b53be94f56535a67ba701a52a52 \ No newline at end of file +7e6c067aba534023c5036f76ad97b4a7b5ffab1c \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 7f68aa62c..5c35b0aab 100755 --- a/.gitattributes +++ b/.gitattributes @@ -2065,6 +2065,49 @@ /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfoState.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfo.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfoState.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Branch.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchOperationMetadata.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchOperation.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointOperation.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointRequest.java linguist-generated=true 
+/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateProjectOperation.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateProjectRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabricksServiceExceptionWithDetailsProto.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteBranchRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteEndpointRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointOperationMetadata.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointPoolerMode.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSettings.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointType.java linguist-generated=true 
+/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ErrorCode.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetBranchRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetEndpointRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetOperationRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetProjectRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListBranchesRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListBranchesResponse.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListEndpointsRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListEndpointsResponse.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsResponse.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Operation.java linguist-generated=true 
+/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresAPI.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresImpl.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresService.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Project.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectDefaultEndpointSettings.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectOperationMetadata.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectSettings.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateBranchOperation.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateBranchRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateEndpointOperation.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateEndpointRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateProjectOperation.java linguist-generated=true 
+/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateProjectRequest.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsCredentials.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AwsKeyInfo.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/AzureKeyInfo.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 4e93fb492..d58a4e459 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -18,3 +18,14 @@ ### Internal Changes ### API Changes +* Add `com.databricks.sdk.service.postgres` package. +* Add `workspaceClient.postgres()` service. +* Add `effectiveUsagePolicyId` and `usagePolicyId` fields for `com.databricks.sdk.service.apps.App`. +* Add `externalAccessEnabled` field for `com.databricks.sdk.service.catalog.CreateAccountsMetastore`. +* Add `externalAccessEnabled` field for `com.databricks.sdk.service.catalog.CreateMetastore`. +* Add `externalAccessEnabled` field for `com.databricks.sdk.service.catalog.UpdateAccountsMetastore`. +* Add `externalAccessEnabled` field for `com.databricks.sdk.service.catalog.UpdateMetastore`. +* Add `errorMessage` field for `com.databricks.sdk.service.settings.CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule`. +* Add `errorMessage` field for `com.databricks.sdk.service.settings.NccAzurePrivateEndpointRule`. +* Add `CONTROL_PLANE_CONNECTION_FAILURE` and `CONTROL_PLANE_CONNECTION_FAILURE_DUE_TO_MISCONFIG` enum values for `com.databricks.sdk.service.compute.TerminationReasonCode`. 
+* Add `CONTROL_PLANE_CONNECTION_FAILURE` and `CONTROL_PLANE_CONNECTION_FAILURE_DUE_TO_MISCONFIG` enum values for `com.databricks.sdk.service.sql.TerminationReasonCode`. \ No newline at end of file diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index c0a600470..0ddab50a1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -176,6 +176,8 @@ import com.databricks.sdk.service.oauth2.ServicePrincipalSecretsProxyService; import com.databricks.sdk.service.pipelines.PipelinesAPI; import com.databricks.sdk.service.pipelines.PipelinesService; +import com.databricks.sdk.service.postgres.PostgresAPI; +import com.databricks.sdk.service.postgres.PostgresService; import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorV2API; import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorV2Service; import com.databricks.sdk.service.serving.ServingEndpointsAPI; @@ -330,6 +332,7 @@ public class WorkspaceClient { private PolicyComplianceForClustersAPI policyComplianceForClustersAPI; private PolicyComplianceForJobsAPI policyComplianceForJobsAPI; private PolicyFamiliesAPI policyFamiliesAPI; + private PostgresAPI postgresAPI; private ProviderExchangeFiltersAPI providerExchangeFiltersAPI; private ProviderExchangesAPI providerExchangesAPI; private ProviderFilesAPI providerFilesAPI; @@ -462,6 +465,7 @@ public WorkspaceClient(DatabricksConfig config) { policyComplianceForClustersAPI = new PolicyComplianceForClustersAPI(apiClient); policyComplianceForJobsAPI = new PolicyComplianceForJobsAPI(apiClient); policyFamiliesAPI = new PolicyFamiliesAPI(apiClient); + postgresAPI = new PostgresAPI(apiClient); providerExchangeFiltersAPI = new ProviderExchangeFiltersAPI(apiClient); providerExchangesAPI = new ProviderExchangesAPI(apiClient); providerFilesAPI = new 
ProviderFilesAPI(apiClient); @@ -1356,6 +1360,11 @@ public PolicyFamiliesAPI policyFamilies() { return policyFamiliesAPI; } + /** The Postgres API provides access to a Postgres database via REST API or direct SQL. */ + public PostgresAPI postgres() { + return postgresAPI; + } + /** Marketplace exchanges filters curate which groups can access an exchange. */ public ProviderExchangeFiltersAPI providerExchangeFilters() { return providerExchangeFiltersAPI; @@ -2882,6 +2891,17 @@ public WorkspaceClient withPolicyFamiliesAPI(PolicyFamiliesAPI policyFamilies) { return this; } + /** Replace the default PostgresService with a custom implementation. */ + public WorkspaceClient withPostgresImpl(PostgresService postgres) { + return this.withPostgresAPI(new PostgresAPI(postgres)); + } + + /** Replace the default PostgresAPI with a custom implementation. */ + public WorkspaceClient withPostgresAPI(PostgresAPI postgres) { + this.postgresAPI = postgres; + return this; + } + /** Replace the default ProviderExchangeFiltersService with a custom implementation. */ public WorkspaceClient withProviderExchangeFiltersImpl( ProviderExchangeFiltersService providerExchangeFilters) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java index 97dda72ac..d7ef3d38a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java @@ -56,6 +56,10 @@ public class App { @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; + /** */ + @JsonProperty("effective_usage_policy_id") + private String effectiveUsagePolicyId; + /** The effective api scopes granted to the user access token. 
*/ @JsonProperty("effective_user_api_scopes") private Collection effectiveUserApiScopes; @@ -114,6 +118,10 @@ public class App { @JsonProperty("url") private String url; + /** */ + @JsonProperty("usage_policy_id") + private String usagePolicyId; + /** */ @JsonProperty("user_api_scopes") private Collection userApiScopes; @@ -208,6 +216,15 @@ public String getEffectiveBudgetPolicyId() { return effectiveBudgetPolicyId; } + public App setEffectiveUsagePolicyId(String effectiveUsagePolicyId) { + this.effectiveUsagePolicyId = effectiveUsagePolicyId; + return this; + } + + public String getEffectiveUsagePolicyId() { + return effectiveUsagePolicyId; + } + public App setEffectiveUserApiScopes(Collection effectiveUserApiScopes) { this.effectiveUserApiScopes = effectiveUserApiScopes; return this; @@ -325,6 +342,15 @@ public String getUrl() { return url; } + public App setUsagePolicyId(String usagePolicyId) { + this.usagePolicyId = usagePolicyId; + return this; + } + + public String getUsagePolicyId() { + return usagePolicyId; + } + public App setUserApiScopes(Collection userApiScopes) { this.userApiScopes = userApiScopes; return this; @@ -349,6 +375,7 @@ public boolean equals(Object o) { && Objects.equals(defaultSourceCodePath, that.defaultSourceCodePath) && Objects.equals(description, that.description) && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId) && Objects.equals(effectiveUserApiScopes, that.effectiveUserApiScopes) && Objects.equals(id, that.id) && Objects.equals(name, that.name) @@ -362,6 +389,7 @@ public boolean equals(Object o) { && Objects.equals(updateTime, that.updateTime) && Objects.equals(updater, that.updater) && Objects.equals(url, that.url) + && Objects.equals(usagePolicyId, that.usagePolicyId) && Objects.equals(userApiScopes, that.userApiScopes); } @@ -378,6 +406,7 @@ public int hashCode() { defaultSourceCodePath, description, effectiveBudgetPolicyId, + 
effectiveUsagePolicyId, effectiveUserApiScopes, id, name, @@ -391,6 +420,7 @@ public int hashCode() { updateTime, updater, url, + usagePolicyId, userApiScopes); } @@ -407,6 +437,7 @@ public String toString() { .add("defaultSourceCodePath", defaultSourceCodePath) .add("description", description) .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("effectiveUsagePolicyId", effectiveUsagePolicyId) .add("effectiveUserApiScopes", effectiveUserApiScopes) .add("id", id) .add("name", name) @@ -420,6 +451,7 @@ public String toString() { .add("updateTime", updateTime) .add("updater", updater) .add("url", url) + .add("usagePolicyId", usagePolicyId) .add("userApiScopes", userApiScopes) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java index b1ca2304f..933db59ac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Next Id: 51 */ +/** Next Id: 52 */ @Generated public enum ConnectionType { BIGQUERY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java index 2d82924c0..02658871b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccountsMetastore.java @@ -9,6 +9,10 @@ @Generated public class CreateAccountsMetastore { + /** Whether to allow non-DBR clients to directly access entities under the metastore. 
*/ + @JsonProperty("external_access_enabled") + private Boolean externalAccessEnabled; + /** The user-specified name of the metastore. */ @JsonProperty("name") private String name; @@ -21,6 +25,15 @@ public class CreateAccountsMetastore { @JsonProperty("storage_root") private String storageRoot; + public CreateAccountsMetastore setExternalAccessEnabled(Boolean externalAccessEnabled) { + this.externalAccessEnabled = externalAccessEnabled; + return this; + } + + public Boolean getExternalAccessEnabled() { + return externalAccessEnabled; + } + public CreateAccountsMetastore setName(String name) { this.name = name; return this; @@ -53,19 +66,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateAccountsMetastore that = (CreateAccountsMetastore) o; - return Objects.equals(name, that.name) + return Objects.equals(externalAccessEnabled, that.externalAccessEnabled) + && Objects.equals(name, that.name) && Objects.equals(region, that.region) && Objects.equals(storageRoot, that.storageRoot); } @Override public int hashCode() { - return Objects.hash(name, region, storageRoot); + return Objects.hash(externalAccessEnabled, name, region, storageRoot); } @Override public String toString() { return new ToStringer(CreateAccountsMetastore.class) + .add("externalAccessEnabled", externalAccessEnabled) .add("name", name) .add("region", region) .add("storageRoot", storageRoot) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java index c10f826ca..091a5fc23 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java @@ -9,6 +9,10 @@ @Generated public class CreateMetastore { + /** Whether to allow non-DBR clients to directly access 
entities under the metastore. */ + @JsonProperty("external_access_enabled") + private Boolean externalAccessEnabled; + /** The user-specified name of the metastore. */ @JsonProperty("name") private String name; @@ -21,6 +25,15 @@ public class CreateMetastore { @JsonProperty("storage_root") private String storageRoot; + public CreateMetastore setExternalAccessEnabled(Boolean externalAccessEnabled) { + this.externalAccessEnabled = externalAccessEnabled; + return this; + } + + public Boolean getExternalAccessEnabled() { + return externalAccessEnabled; + } + public CreateMetastore setName(String name) { this.name = name; return this; @@ -53,19 +66,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateMetastore that = (CreateMetastore) o; - return Objects.equals(name, that.name) + return Objects.equals(externalAccessEnabled, that.externalAccessEnabled) + && Objects.equals(name, that.name) && Objects.equals(region, that.region) && Objects.equals(storageRoot, that.storageRoot); } @Override public int hashCode() { - return Objects.hash(name, region, storageRoot); + return Objects.hash(externalAccessEnabled, name, region, storageRoot); } @Override public String toString() { return new ToStringer(CreateMetastore.class) + .add("externalAccessEnabled", externalAccessEnabled) .add("name", name) .add("region", region) .add("storageRoot", storageRoot) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java index 4d17a7fed..537945deb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Latest kind: CONNECTION_CROWDSTRIKE_EVENT_STREAM_M2M = 281; Next id: 282 */ +/** 
Latest kind: CONNECTION_GOOGLE_ADS_OAUTH_U2M_WITH_DT = 284; Next id:285 */ @Generated public enum SecurableKind { TABLE_DB_STORAGE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java index 215f0eacf..edb988b14 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccountsMetastore.java @@ -24,6 +24,10 @@ public class UpdateAccountsMetastore { @JsonProperty("delta_sharing_scope") private DeltaSharingScopeEnum deltaSharingScope; + /** Whether to allow non-DBR clients to directly access entities under the metastore. */ + @JsonProperty("external_access_enabled") + private Boolean externalAccessEnabled; + /** The owner of the metastore. */ @JsonProperty("owner") private String owner; @@ -65,6 +69,15 @@ public DeltaSharingScopeEnum getDeltaSharingScope() { return deltaSharingScope; } + public UpdateAccountsMetastore setExternalAccessEnabled(Boolean externalAccessEnabled) { + this.externalAccessEnabled = externalAccessEnabled; + return this; + } + + public Boolean getExternalAccessEnabled() { + return externalAccessEnabled; + } + public UpdateAccountsMetastore setOwner(String owner) { this.owner = owner; return this; @@ -102,6 +115,7 @@ public boolean equals(Object o) { deltaSharingRecipientTokenLifetimeInSeconds, that.deltaSharingRecipientTokenLifetimeInSeconds) && Objects.equals(deltaSharingScope, that.deltaSharingScope) + && Objects.equals(externalAccessEnabled, that.externalAccessEnabled) && Objects.equals(owner, that.owner) && Objects.equals(privilegeModelVersion, that.privilegeModelVersion) && Objects.equals(storageRootCredentialId, that.storageRootCredentialId); @@ -113,6 +127,7 @@ public int hashCode() { deltaSharingOrganizationName, 
deltaSharingRecipientTokenLifetimeInSeconds, deltaSharingScope, + externalAccessEnabled, owner, privilegeModelVersion, storageRootCredentialId); @@ -126,6 +141,7 @@ public String toString() { "deltaSharingRecipientTokenLifetimeInSeconds", deltaSharingRecipientTokenLifetimeInSeconds) .add("deltaSharingScope", deltaSharingScope) + .add("externalAccessEnabled", externalAccessEnabled) .add("owner", owner) .add("privilegeModelVersion", privilegeModelVersion) .add("storageRootCredentialId", storageRootCredentialId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java index 3b80b364d..4c66f7533 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java @@ -25,6 +25,10 @@ public class UpdateMetastore { @JsonProperty("delta_sharing_scope") private DeltaSharingScopeEnum deltaSharingScope; + /** Whether to allow non-DBR clients to directly access entities under the metastore. */ + @JsonProperty("external_access_enabled") + private Boolean externalAccessEnabled; + /** Unique ID of the metastore. 
*/ @JsonIgnore private String id; @@ -72,6 +76,15 @@ public DeltaSharingScopeEnum getDeltaSharingScope() { return deltaSharingScope; } + public UpdateMetastore setExternalAccessEnabled(Boolean externalAccessEnabled) { + this.externalAccessEnabled = externalAccessEnabled; + return this; + } + + public Boolean getExternalAccessEnabled() { + return externalAccessEnabled; + } + public UpdateMetastore setId(String id) { this.id = id; return this; @@ -127,6 +140,7 @@ public boolean equals(Object o) { deltaSharingRecipientTokenLifetimeInSeconds, that.deltaSharingRecipientTokenLifetimeInSeconds) && Objects.equals(deltaSharingScope, that.deltaSharingScope) + && Objects.equals(externalAccessEnabled, that.externalAccessEnabled) && Objects.equals(id, that.id) && Objects.equals(newName, that.newName) && Objects.equals(owner, that.owner) @@ -140,6 +154,7 @@ public int hashCode() { deltaSharingOrganizationName, deltaSharingRecipientTokenLifetimeInSeconds, deltaSharingScope, + externalAccessEnabled, id, newName, owner, @@ -155,6 +170,7 @@ public String toString() { "deltaSharingRecipientTokenLifetimeInSeconds", deltaSharingRecipientTokenLifetimeInSeconds) .add("deltaSharingScope", deltaSharingScope) + .add("externalAccessEnabled", externalAccessEnabled) .add("id", id) .add("newName", newName) .add("owner", owner) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java index 8d1a96478..641c74395 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java @@ -59,6 +59,8 @@ public enum TerminationReasonCode { CLUSTER_OPERATION_TIMEOUT, COMMUNICATION_LOST, CONTAINER_LAUNCH_FAILURE, + CONTROL_PLANE_CONNECTION_FAILURE, + CONTROL_PLANE_CONNECTION_FAILURE_DUE_TO_MISCONFIG, 
CONTROL_PLANE_REQUEST_FAILURE, CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG, DATABASE_CONNECTION_FAILURE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Branch.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Branch.java new file mode 100755 index 000000000..fe864bebe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Branch.java @@ -0,0 +1,344 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Timestamp; +import java.util.Objects; + +@Generated +public class Branch { + /** A timestamp indicating when the branch was created. */ + @JsonProperty("create_time") + private Timestamp createTime; + + /** The branch's state, indicating if it is initializing, ready for use, or archived. */ + @JsonProperty("current_state") + private String currentState; + + /** + * Whether the branch is the project's default branch. This field is only returned on + * create/update responses. See effective_default for the value that is actually applied to the + * branch. + */ + @JsonProperty("default") + private Boolean defaultValue; + + /** Whether the branch is the project's default branch. */ + @JsonProperty("effective_default") + private Boolean effectiveDefault; + + /** Whether the branch is protected. */ + @JsonProperty("effective_is_protected") + private Boolean effectiveIsProtected; + + /** + * The name of the source branch from which this branch was created. Format: + * projects/{project_id}/branches/{branch_id} + */ + @JsonProperty("effective_source_branch") + private String effectiveSourceBranch; + + /** The Log Sequence Number (LSN) on the source branch from which this branch was created. 
*/ + @JsonProperty("effective_source_branch_lsn") + private String effectiveSourceBranchLsn; + + /** The point in time on the source branch from which this branch was created. */ + @JsonProperty("effective_source_branch_time") + private Timestamp effectiveSourceBranchTime; + + /** Whether the branch is protected. */ + @JsonProperty("is_protected") + private Boolean isProtected; + + /** The logical size of the branch. */ + @JsonProperty("logical_size_bytes") + private Long logicalSizeBytes; + + /** The resource name of the branch. Format: projects/{project_id}/branches/{branch_id} */ + @JsonProperty("name") + private String name; + + /** The project containing this branch. Format: projects/{project_id} */ + @JsonProperty("parent") + private String parent; + + /** */ + @JsonProperty("pending_state") + private String pendingState; + + /** + * The name of the source branch from which this branch was created. Format: + * projects/{project_id}/branches/{branch_id} + */ + @JsonProperty("source_branch") + private String sourceBranch; + + /** The Log Sequence Number (LSN) on the source branch from which this branch was created. */ + @JsonProperty("source_branch_lsn") + private String sourceBranchLsn; + + /** The point in time on the source branch from which this branch was created. */ + @JsonProperty("source_branch_time") + private Timestamp sourceBranchTime; + + /** A timestamp indicating when the `current_state` began. */ + @JsonProperty("state_change_time") + private Timestamp stateChangeTime; + + /** System generated unique ID for the branch. */ + @JsonProperty("uid") + private String uid; + + /** A timestamp indicating when the branch was last updated. 
*/ + @JsonProperty("update_time") + private Timestamp updateTime; + + public Branch setCreateTime(Timestamp createTime) { + this.createTime = createTime; + return this; + } + + public Timestamp getCreateTime() { + return createTime; + } + + public Branch setCurrentState(String currentState) { + this.currentState = currentState; + return this; + } + + public String getCurrentState() { + return currentState; + } + + public Branch setDefault(Boolean defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + public Boolean getDefault() { + return defaultValue; + } + + public Branch setEffectiveDefault(Boolean effectiveDefault) { + this.effectiveDefault = effectiveDefault; + return this; + } + + public Boolean getEffectiveDefault() { + return effectiveDefault; + } + + public Branch setEffectiveIsProtected(Boolean effectiveIsProtected) { + this.effectiveIsProtected = effectiveIsProtected; + return this; + } + + public Boolean getEffectiveIsProtected() { + return effectiveIsProtected; + } + + public Branch setEffectiveSourceBranch(String effectiveSourceBranch) { + this.effectiveSourceBranch = effectiveSourceBranch; + return this; + } + + public String getEffectiveSourceBranch() { + return effectiveSourceBranch; + } + + public Branch setEffectiveSourceBranchLsn(String effectiveSourceBranchLsn) { + this.effectiveSourceBranchLsn = effectiveSourceBranchLsn; + return this; + } + + public String getEffectiveSourceBranchLsn() { + return effectiveSourceBranchLsn; + } + + public Branch setEffectiveSourceBranchTime(Timestamp effectiveSourceBranchTime) { + this.effectiveSourceBranchTime = effectiveSourceBranchTime; + return this; + } + + public Timestamp getEffectiveSourceBranchTime() { + return effectiveSourceBranchTime; + } + + public Branch setIsProtected(Boolean isProtected) { + this.isProtected = isProtected; + return this; + } + + public Boolean getIsProtected() { + return isProtected; + } + + public Branch setLogicalSizeBytes(Long logicalSizeBytes) { + 
this.logicalSizeBytes = logicalSizeBytes; + return this; + } + + public Long getLogicalSizeBytes() { + return logicalSizeBytes; + } + + public Branch setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public Branch setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + public Branch setPendingState(String pendingState) { + this.pendingState = pendingState; + return this; + } + + public String getPendingState() { + return pendingState; + } + + public Branch setSourceBranch(String sourceBranch) { + this.sourceBranch = sourceBranch; + return this; + } + + public String getSourceBranch() { + return sourceBranch; + } + + public Branch setSourceBranchLsn(String sourceBranchLsn) { + this.sourceBranchLsn = sourceBranchLsn; + return this; + } + + public String getSourceBranchLsn() { + return sourceBranchLsn; + } + + public Branch setSourceBranchTime(Timestamp sourceBranchTime) { + this.sourceBranchTime = sourceBranchTime; + return this; + } + + public Timestamp getSourceBranchTime() { + return sourceBranchTime; + } + + public Branch setStateChangeTime(Timestamp stateChangeTime) { + this.stateChangeTime = stateChangeTime; + return this; + } + + public Timestamp getStateChangeTime() { + return stateChangeTime; + } + + public Branch setUid(String uid) { + this.uid = uid; + return this; + } + + public String getUid() { + return uid; + } + + public Branch setUpdateTime(Timestamp updateTime) { + this.updateTime = updateTime; + return this; + } + + public Timestamp getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Branch that = (Branch) o; + return Objects.equals(createTime, that.createTime) + && Objects.equals(currentState, that.currentState) + && Objects.equals(defaultValue, that.defaultValue) + && 
Objects.equals(effectiveDefault, that.effectiveDefault) + && Objects.equals(effectiveIsProtected, that.effectiveIsProtected) + && Objects.equals(effectiveSourceBranch, that.effectiveSourceBranch) + && Objects.equals(effectiveSourceBranchLsn, that.effectiveSourceBranchLsn) + && Objects.equals(effectiveSourceBranchTime, that.effectiveSourceBranchTime) + && Objects.equals(isProtected, that.isProtected) + && Objects.equals(logicalSizeBytes, that.logicalSizeBytes) + && Objects.equals(name, that.name) + && Objects.equals(parent, that.parent) + && Objects.equals(pendingState, that.pendingState) + && Objects.equals(sourceBranch, that.sourceBranch) + && Objects.equals(sourceBranchLsn, that.sourceBranchLsn) + && Objects.equals(sourceBranchTime, that.sourceBranchTime) + && Objects.equals(stateChangeTime, that.stateChangeTime) + && Objects.equals(uid, that.uid) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + createTime, + currentState, + defaultValue, + effectiveDefault, + effectiveIsProtected, + effectiveSourceBranch, + effectiveSourceBranchLsn, + effectiveSourceBranchTime, + isProtected, + logicalSizeBytes, + name, + parent, + pendingState, + sourceBranch, + sourceBranchLsn, + sourceBranchTime, + stateChangeTime, + uid, + updateTime); + } + + @Override + public String toString() { + return new ToStringer(Branch.class) + .add("createTime", createTime) + .add("currentState", currentState) + .add("defaultValue", defaultValue) + .add("effectiveDefault", effectiveDefault) + .add("effectiveIsProtected", effectiveIsProtected) + .add("effectiveSourceBranch", effectiveSourceBranch) + .add("effectiveSourceBranchLsn", effectiveSourceBranchLsn) + .add("effectiveSourceBranchTime", effectiveSourceBranchTime) + .add("isProtected", isProtected) + .add("logicalSizeBytes", logicalSizeBytes) + .add("name", name) + .add("parent", parent) + .add("pendingState", pendingState) + .add("sourceBranch", sourceBranch) + 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.postgres;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;

/**
 * Metadata attached to a long-running branch operation. The schema currently defines no fields, so
 * equality depends only on the runtime class.
 */
@Generated
public class BranchOperationMetadata {

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    // With no fields, two instances are equal exactly when they share the same concrete class.
    return o != null && getClass() == o.getClass();
  }

  @Override
  public int hashCode() {
    return Objects.hash();
  }

  @Override
  public String toString() {
    return new ToStringer(BranchOperationMetadata.class).toString();
  }
}
package com.databricks.sdk.service.postgres;

import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Wrapper for interacting with a long-running createBranch operation. Provides methods to wait for
 * completion, check status, and access metadata.
 */
@Generated
public class CreateBranchOperation {
  private static final Logger LOG = LoggerFactory.getLogger(CreateBranchOperation.class);

  /** Upper bound on the per-attempt sleep, in seconds; the backoff grows linearly until here. */
  private static final int MAX_SLEEP_SECONDS = 10;

  /** Service used to poll the server for the latest operation state. */
  private final PostgresService impl;

  /** Most recently observed operation state; replaced on every refresh. */
  private Operation operation;

  private final ObjectMapper objectMapper;

  public CreateBranchOperation(PostgresService impl, Operation operation) {
    this.impl = impl;
    this.operation = operation;
    this.objectMapper = SerDeUtils.createMapper();
  }

  /**
   * Wait for the operation to complete and return the resulting Branch. Waits indefinitely if no
   * timeout is specified.
   *
   * @return the created Branch
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public Branch waitForCompletion() throws TimeoutException {
    return waitForCompletion(Optional.empty());
  }

  /**
   * Wait for the operation to complete and return the resulting Branch.
   *
   * @param options the options for configuring the wait behavior, can be empty for defaults
   * @return the created Branch
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public Branch waitForCompletion(Optional<LroOptions> options) throws TimeoutException {
    Optional<Duration> timeout = options.flatMap(LroOptions::getTimeout);
    // No timeout means "wait forever": push the deadline to the largest representable instant.
    long deadline =
        timeout.isPresent()
            ? System.currentTimeMillis() + timeout.get().toMillis()
            : Long.MAX_VALUE;
    String statusMessage = "polling operation...";
    int attempt = 1;

    while (System.currentTimeMillis() < deadline) {
      refreshOperation();

      if (operation.getDone() != null && operation.getDone()) {
        // Operation completed; surface failure, otherwise unmarshal the success payload.
        if (operation.getError() != null) {
          throw new DatabricksException("Operation failed: " + describeError());
        }
        if (operation.getResponse() == null) {
          throw new DatabricksException("Operation completed but no response available");
        }
        try {
          JsonNode responseJson = objectMapper.valueToTree(operation.getResponse());
          return objectMapper.treeToValue(responseJson, Branch.class);
        } catch (JsonProcessingException e) {
          throw new DatabricksException("Failed to unmarshal branch response: " + e.getMessage(), e);
        }
      }

      // Operation still in progress; back off before polling again.
      sleepBeforeNextPoll(attempt);
      attempt++;
    }

    String timeoutMessage =
        timeout.isPresent()
            ? String.format("Operation timed out after %s: %s", timeout.get(), statusMessage)
            : String.format("Operation timed out: %s", statusMessage);
    throw new TimeoutException(timeoutMessage);
  }

  /** Formats the operation error as "[error_code] message", omitting whichever parts are unset. */
  private String describeError() {
    String errorMsg = "unknown error";
    if (operation.getError().getMessage() != null && !operation.getError().getMessage().isEmpty()) {
      errorMsg = operation.getError().getMessage();
    }
    if (operation.getError().getErrorCode() != null) {
      errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg);
    }
    return errorMsg;
  }

  /**
   * Sleeps roughly min(attempt, MAX_SLEEP_SECONDS) seconds plus up to one second of jitter.
   * Converts an interruption into a DatabricksException after re-asserting the interrupt flag.
   */
  private void sleepBeforeNextPoll(int attempt) {
    int sleep = Math.min(attempt, MAX_SLEEP_SECONDS);
    LOG.info("operation={}: operation in progress (sleeping ~{}s)", operation.getName(), sleep);
    try {
      Thread.sleep(sleep * 1000L + (long) (Math.random() * 1000));
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new DatabricksException("Current thread was interrupted", e);
    }
  }

  /**
   * Get the operation name.
   *
   * @return the operation name
   */
  public String getName() {
    return operation.getName();
  }

  /**
   * Get the operation metadata.
   *
   * @return the operation metadata, or null if not available
   * @throws DatabricksException if the metadata cannot be deserialized
   */
  public BranchOperationMetadata getMetadata() {
    if (operation.getMetadata() == null) {
      return null;
    }
    try {
      JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata());
      return objectMapper.treeToValue(metadataJson, BranchOperationMetadata.class);
    } catch (JsonProcessingException e) {
      throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e);
    }
  }

  /**
   * Check if the operation is done. This method refreshes the operation state before checking.
   *
   * @return true if the operation is complete, false otherwise
   * @throws DatabricksException if the status check fails
   */
  public boolean isDone() {
    refreshOperation();
    return operation.getDone() != null && operation.getDone();
  }

  /** Refresh the operation state by polling the server. */
  private void refreshOperation() {
    operation = impl.getOperation(new GetOperationRequest().setName(operation.getName()));
  }
}

This value should be 4-63 characters, and valid characters are /[a-z][0-9]-/. + */ + @JsonIgnore + @QueryParam("branch_id") + private String branchId; + + /** The Project where this Branch will be created. Format: projects/{project_id} */ + @JsonIgnore private String parent; + + public CreateBranchRequest setBranch(Branch branch) { + this.branch = branch; + return this; + } + + public Branch getBranch() { + return branch; + } + + public CreateBranchRequest setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + + public CreateBranchRequest setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateBranchRequest that = (CreateBranchRequest) o; + return Objects.equals(branch, that.branch) + && Objects.equals(branchId, that.branchId) + && Objects.equals(parent, that.parent); + } + + @Override + public int hashCode() { + return Objects.hash(branch, branchId, parent); + } + + @Override + public String toString() { + return new ToStringer(CreateBranchRequest.class) + .add("branch", branch) + .add("branchId", branchId) + .add("parent", parent) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointOperation.java new file mode 100755 index 000000000..977aa855f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointOperation.java @@ -0,0 +1,162 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
package com.databricks.sdk.service.postgres;

import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Wrapper for interacting with a long-running createEndpoint operation. Provides methods to wait
 * for completion, check status, and access metadata.
 */
@Generated
public class CreateEndpointOperation {
  private static final Logger LOG = LoggerFactory.getLogger(CreateEndpointOperation.class);

  /** Upper bound on the per-attempt sleep, in seconds; the backoff grows linearly until here. */
  private static final int MAX_SLEEP_SECONDS = 10;

  /** Service used to poll the server for the latest operation state. */
  private final PostgresService impl;

  /** Most recently observed operation state; replaced on every refresh. */
  private Operation operation;

  private final ObjectMapper objectMapper;

  public CreateEndpointOperation(PostgresService impl, Operation operation) {
    this.impl = impl;
    this.operation = operation;
    this.objectMapper = SerDeUtils.createMapper();
  }

  /**
   * Wait for the operation to complete and return the resulting Endpoint. Waits indefinitely if no
   * timeout is specified.
   *
   * @return the created Endpoint
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public Endpoint waitForCompletion() throws TimeoutException {
    return waitForCompletion(Optional.empty());
  }

  /**
   * Wait for the operation to complete and return the resulting Endpoint.
   *
   * @param options the options for configuring the wait behavior, can be empty for defaults
   * @return the created Endpoint
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public Endpoint waitForCompletion(Optional<LroOptions> options) throws TimeoutException {
    Optional<Duration> timeout = options.flatMap(LroOptions::getTimeout);
    // No timeout means "wait forever": push the deadline to the largest representable instant.
    long deadline =
        timeout.isPresent()
            ? System.currentTimeMillis() + timeout.get().toMillis()
            : Long.MAX_VALUE;
    String statusMessage = "polling operation...";
    int attempt = 1;

    while (System.currentTimeMillis() < deadline) {
      refreshOperation();

      if (operation.getDone() != null && operation.getDone()) {
        // Operation completed; surface failure, otherwise unmarshal the success payload.
        if (operation.getError() != null) {
          throw new DatabricksException("Operation failed: " + describeError());
        }
        if (operation.getResponse() == null) {
          throw new DatabricksException("Operation completed but no response available");
        }
        try {
          JsonNode responseJson = objectMapper.valueToTree(operation.getResponse());
          return objectMapper.treeToValue(responseJson, Endpoint.class);
        } catch (JsonProcessingException e) {
          throw new DatabricksException(
              "Failed to unmarshal endpoint response: " + e.getMessage(), e);
        }
      }

      // Operation still in progress; back off before polling again.
      sleepBeforeNextPoll(attempt);
      attempt++;
    }

    String timeoutMessage =
        timeout.isPresent()
            ? String.format("Operation timed out after %s: %s", timeout.get(), statusMessage)
            : String.format("Operation timed out: %s", statusMessage);
    throw new TimeoutException(timeoutMessage);
  }

  /** Formats the operation error as "[error_code] message", omitting whichever parts are unset. */
  private String describeError() {
    String errorMsg = "unknown error";
    if (operation.getError().getMessage() != null && !operation.getError().getMessage().isEmpty()) {
      errorMsg = operation.getError().getMessage();
    }
    if (operation.getError().getErrorCode() != null) {
      errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg);
    }
    return errorMsg;
  }

  /**
   * Sleeps roughly min(attempt, MAX_SLEEP_SECONDS) seconds plus up to one second of jitter.
   * Converts an interruption into a DatabricksException after re-asserting the interrupt flag.
   */
  private void sleepBeforeNextPoll(int attempt) {
    int sleep = Math.min(attempt, MAX_SLEEP_SECONDS);
    LOG.info("operation={}: operation in progress (sleeping ~{}s)", operation.getName(), sleep);
    try {
      Thread.sleep(sleep * 1000L + (long) (Math.random() * 1000));
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new DatabricksException("Current thread was interrupted", e);
    }
  }

  /**
   * Get the operation name.
   *
   * @return the operation name
   */
  public String getName() {
    return operation.getName();
  }

  /**
   * Get the operation metadata.
   *
   * @return the operation metadata, or null if not available
   * @throws DatabricksException if the metadata cannot be deserialized
   */
  public EndpointOperationMetadata getMetadata() {
    if (operation.getMetadata() == null) {
      return null;
    }
    try {
      JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata());
      return objectMapper.treeToValue(metadataJson, EndpointOperationMetadata.class);
    } catch (JsonProcessingException e) {
      throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e);
    }
  }

  /**
   * Check if the operation is done. This method refreshes the operation state before checking.
   *
   * @return true if the operation is complete, false otherwise
   * @throws DatabricksException if the status check fails
   */
  public boolean isDone() {
    refreshOperation();
    return operation.getDone() != null && operation.getDone();
  }

  /** Refresh the operation state by polling the server. */
  private void refreshOperation() {
    operation = impl.getOperation(new GetOperationRequest().setName(operation.getName()));
  }
}

This value should be 4-63 characters, and valid characters are /[a-z][0-9]-/. + */ + @JsonIgnore + @QueryParam("endpoint_id") + private String endpointId; + + /** + * The Branch where this Endpoint will be created. Format: + * projects/{project_id}/branches/{branch_id} + */ + @JsonIgnore private String parent; + + public CreateEndpointRequest setEndpoint(Endpoint endpoint) { + this.endpoint = endpoint; + return this; + } + + public Endpoint getEndpoint() { + return endpoint; + } + + public CreateEndpointRequest setEndpointId(String endpointId) { + this.endpointId = endpointId; + return this; + } + + public String getEndpointId() { + return endpointId; + } + + public CreateEndpointRequest setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateEndpointRequest that = (CreateEndpointRequest) o; + return Objects.equals(endpoint, that.endpoint) + && Objects.equals(endpointId, that.endpointId) + && Objects.equals(parent, that.parent); + } + + @Override + public int hashCode() { + return Objects.hash(endpoint, endpointId, parent); + } + + @Override + public String toString() { + return new ToStringer(CreateEndpointRequest.class) + .add("endpoint", endpoint) + .add("endpointId", endpointId) + .add("parent", parent) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateProjectOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateProjectOperation.java new file mode 100755 index 000000000..9292706fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateProjectOperation.java @@ -0,0 +1,162 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
package com.databricks.sdk.service.postgres;

import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Wrapper for interacting with a long-running createProject operation. Provides methods to wait for
 * completion, check status, and access metadata.
 */
@Generated
public class CreateProjectOperation {
  private static final Logger LOG = LoggerFactory.getLogger(CreateProjectOperation.class);

  /** Upper bound on the per-attempt sleep, in seconds; the backoff grows linearly until here. */
  private static final int MAX_SLEEP_SECONDS = 10;

  /** Service used to poll the server for the latest operation state. */
  private final PostgresService impl;

  /** Most recently observed operation state; replaced on every refresh. */
  private Operation operation;

  private final ObjectMapper objectMapper;

  public CreateProjectOperation(PostgresService impl, Operation operation) {
    this.impl = impl;
    this.operation = operation;
    this.objectMapper = SerDeUtils.createMapper();
  }

  /**
   * Wait for the operation to complete and return the resulting Project. Waits indefinitely if no
   * timeout is specified.
   *
   * @return the created Project
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public Project waitForCompletion() throws TimeoutException {
    return waitForCompletion(Optional.empty());
  }

  /**
   * Wait for the operation to complete and return the resulting Project.
   *
   * @param options the options for configuring the wait behavior, can be empty for defaults
   * @return the created Project
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public Project waitForCompletion(Optional<LroOptions> options) throws TimeoutException {
    Optional<Duration> timeout = options.flatMap(LroOptions::getTimeout);
    // No timeout means "wait forever": push the deadline to the largest representable instant.
    long deadline =
        timeout.isPresent()
            ? System.currentTimeMillis() + timeout.get().toMillis()
            : Long.MAX_VALUE;
    String statusMessage = "polling operation...";
    int attempt = 1;

    while (System.currentTimeMillis() < deadline) {
      refreshOperation();

      if (operation.getDone() != null && operation.getDone()) {
        // Operation completed; surface failure, otherwise unmarshal the success payload.
        if (operation.getError() != null) {
          throw new DatabricksException("Operation failed: " + describeError());
        }
        if (operation.getResponse() == null) {
          throw new DatabricksException("Operation completed but no response available");
        }
        try {
          JsonNode responseJson = objectMapper.valueToTree(operation.getResponse());
          return objectMapper.treeToValue(responseJson, Project.class);
        } catch (JsonProcessingException e) {
          throw new DatabricksException(
              "Failed to unmarshal project response: " + e.getMessage(), e);
        }
      }

      // Operation still in progress; back off before polling again.
      sleepBeforeNextPoll(attempt);
      attempt++;
    }

    String timeoutMessage =
        timeout.isPresent()
            ? String.format("Operation timed out after %s: %s", timeout.get(), statusMessage)
            : String.format("Operation timed out: %s", statusMessage);
    throw new TimeoutException(timeoutMessage);
  }

  /** Formats the operation error as "[error_code] message", omitting whichever parts are unset. */
  private String describeError() {
    String errorMsg = "unknown error";
    if (operation.getError().getMessage() != null && !operation.getError().getMessage().isEmpty()) {
      errorMsg = operation.getError().getMessage();
    }
    if (operation.getError().getErrorCode() != null) {
      errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg);
    }
    return errorMsg;
  }

  /**
   * Sleeps roughly min(attempt, MAX_SLEEP_SECONDS) seconds plus up to one second of jitter.
   * Converts an interruption into a DatabricksException after re-asserting the interrupt flag.
   */
  private void sleepBeforeNextPoll(int attempt) {
    int sleep = Math.min(attempt, MAX_SLEEP_SECONDS);
    LOG.info("operation={}: operation in progress (sleeping ~{}s)", operation.getName(), sleep);
    try {
      Thread.sleep(sleep * 1000L + (long) (Math.random() * 1000));
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new DatabricksException("Current thread was interrupted", e);
    }
  }

  /**
   * Get the operation name.
   *
   * @return the operation name
   */
  public String getName() {
    return operation.getName();
  }

  /**
   * Get the operation metadata.
   *
   * @return the operation metadata, or null if not available
   * @throws DatabricksException if the metadata cannot be deserialized
   */
  public ProjectOperationMetadata getMetadata() {
    if (operation.getMetadata() == null) {
      return null;
    }
    try {
      JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata());
      return objectMapper.treeToValue(metadataJson, ProjectOperationMetadata.class);
    } catch (JsonProcessingException e) {
      throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e);
    }
  }

  /**
   * Check if the operation is done. This method refreshes the operation state before checking.
   *
   * @return true if the operation is complete, false otherwise
   * @throws DatabricksException if the status check fails
   */
  public boolean isDone() {
    refreshOperation();
    return operation.getDone() != null && operation.getDone();
  }

  /** Refresh the operation state by polling the server. */
  private void refreshOperation() {
    operation = impl.getOperation(new GetOperationRequest().setName(operation.getName()));
  }
}

This value should be 4-63 characters, and valid characters are /[a-z][0-9]-/. + */ + @JsonIgnore + @QueryParam("project_id") + private String projectId; + + public CreateProjectRequest setProject(Project project) { + this.project = project; + return this; + } + + public Project getProject() { + return project; + } + + public CreateProjectRequest setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateProjectRequest that = (CreateProjectRequest) o; + return Objects.equals(project, that.project) && Objects.equals(projectId, that.projectId); + } + + @Override + public int hashCode() { + return Objects.hash(project, projectId); + } + + @Override + public String toString() { + return new ToStringer(CreateProjectRequest.class) + .add("project", project) + .add("projectId", projectId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabricksServiceExceptionWithDetailsProto.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabricksServiceExceptionWithDetailsProto.java new file mode 100755 index 000000000..d9fcd43ed --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabricksServiceExceptionWithDetailsProto.java @@ -0,0 +1,94 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Databricks Error that is returned by all Databricks APIs. 
/** Databricks Error that is returned by all Databricks APIs. */
@Generated
public class DatabricksServiceExceptionWithDetailsProto {
  /**
   * Structured error details attached to the error.
   *
   * <p>NOTE(review): the element type of this collection appears to have been lost in extraction
   * (the generated source very likely declares a type parameter here) — verify against the
   * generator output before relying on the raw type.
   *
   * @pbjson-skip
   */
  @JsonProperty("details")
  private Collection details;

  /** Machine-readable error code classifying the failure. */
  @JsonProperty("error_code")
  private ErrorCode errorCode;

  /** Human-readable description of the error. */
  @JsonProperty("message")
  private String message;

  /** Server-side stack trace, when the service chooses to include one. */
  @JsonProperty("stack_trace")
  private String stackTrace;

  public DatabricksServiceExceptionWithDetailsProto setDetails(Collection details) {
    this.details = details;
    return this;
  }

  public Collection getDetails() {
    return details;
  }

  public DatabricksServiceExceptionWithDetailsProto setErrorCode(ErrorCode errorCode) {
    this.errorCode = errorCode;
    return this;
  }

  public ErrorCode getErrorCode() {
    return errorCode;
  }

  public DatabricksServiceExceptionWithDetailsProto setMessage(String message) {
    this.message = message;
    return this;
  }

  public String getMessage() {
    return message;
  }

  public DatabricksServiceExceptionWithDetailsProto setStackTrace(String stackTrace) {
    this.stackTrace = stackTrace;
    return this;
  }

  public String getStackTrace() {
    return stackTrace;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    DatabricksServiceExceptionWithDetailsProto that =
        (DatabricksServiceExceptionWithDetailsProto) o;
    return Objects.equals(details, that.details)
        && Objects.equals(errorCode, that.errorCode)
        && Objects.equals(message, that.message)
        && Objects.equals(stackTrace, that.stackTrace);
  }

  @Override
  public int hashCode() {
    return Objects.hash(details, errorCode, message, stackTrace);
  }

  @Override
  public String toString() {
    return new ToStringer(DatabricksServiceExceptionWithDetailsProto.class)
        .add("details", details)
        .add("errorCode", errorCode)
        .add("message", message)
        .add("stackTrace", stackTrace)
        .toString();
  }
}
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteBranchRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteBranchRequest.java new file mode 100755 index 000000000..0a4cd332c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteBranchRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteBranchRequest { + /** The name of the Branch to delete. Format: projects/{project_id}/branches/{branch_id} */ + @JsonIgnore private String name; + + public DeleteBranchRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteBranchRequest that = (DeleteBranchRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteBranchRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteEndpointRequest.java new file mode 100755 index 000000000..3882ab486 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteEndpointRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteEndpointRequest { + /** + * The name of the Endpoint to delete. Format: + * projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} + */ + @JsonIgnore private String name; + + public DeleteEndpointRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteEndpointRequest that = (DeleteEndpointRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteEndpointRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectRequest.java new file mode 100755 index 000000000..0b5ba0039 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteProjectRequest { + /** The name of the Project to delete. 
Format: projects/{project_id} */ + @JsonIgnore private String name; + + public DeleteProjectRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteProjectRequest that = (DeleteProjectRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteProjectRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java new file mode 100755 index 000000000..b8adfb44f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java @@ -0,0 +1,429 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Duration; +import com.google.protobuf.Timestamp; +import java.util.Objects; + +@Generated +public class Endpoint { + /** The maximum number of Compute Units. */ + @JsonProperty("autoscaling_limit_max_cu") + private Double autoscalingLimitMaxCu; + + /** The minimum number of Compute Units. */ + @JsonProperty("autoscaling_limit_min_cu") + private Double autoscalingLimitMinCu; + + /** A timestamp indicating when the compute endpoint was created. */ + @JsonProperty("create_time") + private Timestamp createTime; + + /** */ + @JsonProperty("current_state") + private EndpointState currentState; + + /** + * Whether to restrict connections to the compute endpoint. 
Enabling this option schedules a + * suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or + * console action. + */ + @JsonProperty("disabled") + private Boolean disabled; + + /** The maximum number of Compute Units. */ + @JsonProperty("effective_autoscaling_limit_max_cu") + private Double effectiveAutoscalingLimitMaxCu; + + /** The minimum number of Compute Units. */ + @JsonProperty("effective_autoscaling_limit_min_cu") + private Double effectiveAutoscalingLimitMinCu; + + /** + * Whether to restrict connections to the compute endpoint. Enabling this option schedules a + * suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or + * console action. + */ + @JsonProperty("effective_disabled") + private Boolean effectiveDisabled; + + /** */ + @JsonProperty("effective_pooler_mode") + private EndpointPoolerMode effectivePoolerMode; + + /** */ + @JsonProperty("effective_settings") + private EndpointSettings effectiveSettings; + + /** Duration of inactivity after which the compute endpoint is automatically suspended. */ + @JsonProperty("effective_suspend_timeout_duration") + private Duration effectiveSuspendTimeoutDuration; + + /** The endpoint type. There could be only one READ_WRITE endpoint per branch. */ + @JsonProperty("endpoint_type") + private EndpointType endpointType; + + /** + * The hostname of the compute endpoint. This is the hostname specified when connecting to a + * database. + */ + @JsonProperty("host") + private String host; + + /** A timestamp indicating when the compute endpoint was last active. */ + @JsonProperty("last_active_time") + private Timestamp lastActiveTime; + + /** + * The resource name of the endpoint. Format: + * projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} + */ + @JsonProperty("name") + private String name; + + /** The branch containing this endpoint. 
Format: projects/{project_id}/branches/{branch_id} */ + @JsonProperty("parent") + private String parent; + + /** */ + @JsonProperty("pending_state") + private EndpointState pendingState; + + /** */ + @JsonProperty("pooler_mode") + private EndpointPoolerMode poolerMode; + + /** */ + @JsonProperty("settings") + private EndpointSettings settings; + + /** A timestamp indicating when the compute endpoint was last started. */ + @JsonProperty("start_time") + private Timestamp startTime; + + /** A timestamp indicating when the compute endpoint was last suspended. */ + @JsonProperty("suspend_time") + private Timestamp suspendTime; + + /** Duration of inactivity after which the compute endpoint is automatically suspended. */ + @JsonProperty("suspend_timeout_duration") + private Duration suspendTimeoutDuration; + + /** System generated unique ID for the endpoint. */ + @JsonProperty("uid") + private String uid; + + /** A timestamp indicating when the compute endpoint was last updated. */ + @JsonProperty("update_time") + private Timestamp updateTime; + + public Endpoint setAutoscalingLimitMaxCu(Double autoscalingLimitMaxCu) { + this.autoscalingLimitMaxCu = autoscalingLimitMaxCu; + return this; + } + + public Double getAutoscalingLimitMaxCu() { + return autoscalingLimitMaxCu; + } + + public Endpoint setAutoscalingLimitMinCu(Double autoscalingLimitMinCu) { + this.autoscalingLimitMinCu = autoscalingLimitMinCu; + return this; + } + + public Double getAutoscalingLimitMinCu() { + return autoscalingLimitMinCu; + } + + public Endpoint setCreateTime(Timestamp createTime) { + this.createTime = createTime; + return this; + } + + public Timestamp getCreateTime() { + return createTime; + } + + public Endpoint setCurrentState(EndpointState currentState) { + this.currentState = currentState; + return this; + } + + public EndpointState getCurrentState() { + return currentState; + } + + public Endpoint setDisabled(Boolean disabled) { + this.disabled = disabled; + return this; + } + + public 
Boolean getDisabled() { + return disabled; + } + + public Endpoint setEffectiveAutoscalingLimitMaxCu(Double effectiveAutoscalingLimitMaxCu) { + this.effectiveAutoscalingLimitMaxCu = effectiveAutoscalingLimitMaxCu; + return this; + } + + public Double getEffectiveAutoscalingLimitMaxCu() { + return effectiveAutoscalingLimitMaxCu; + } + + public Endpoint setEffectiveAutoscalingLimitMinCu(Double effectiveAutoscalingLimitMinCu) { + this.effectiveAutoscalingLimitMinCu = effectiveAutoscalingLimitMinCu; + return this; + } + + public Double getEffectiveAutoscalingLimitMinCu() { + return effectiveAutoscalingLimitMinCu; + } + + public Endpoint setEffectiveDisabled(Boolean effectiveDisabled) { + this.effectiveDisabled = effectiveDisabled; + return this; + } + + public Boolean getEffectiveDisabled() { + return effectiveDisabled; + } + + public Endpoint setEffectivePoolerMode(EndpointPoolerMode effectivePoolerMode) { + this.effectivePoolerMode = effectivePoolerMode; + return this; + } + + public EndpointPoolerMode getEffectivePoolerMode() { + return effectivePoolerMode; + } + + public Endpoint setEffectiveSettings(EndpointSettings effectiveSettings) { + this.effectiveSettings = effectiveSettings; + return this; + } + + public EndpointSettings getEffectiveSettings() { + return effectiveSettings; + } + + public Endpoint setEffectiveSuspendTimeoutDuration(Duration effectiveSuspendTimeoutDuration) { + this.effectiveSuspendTimeoutDuration = effectiveSuspendTimeoutDuration; + return this; + } + + public Duration getEffectiveSuspendTimeoutDuration() { + return effectiveSuspendTimeoutDuration; + } + + public Endpoint setEndpointType(EndpointType endpointType) { + this.endpointType = endpointType; + return this; + } + + public EndpointType getEndpointType() { + return endpointType; + } + + public Endpoint setHost(String host) { + this.host = host; + return this; + } + + public String getHost() { + return host; + } + + public Endpoint setLastActiveTime(Timestamp lastActiveTime) { + 
this.lastActiveTime = lastActiveTime; + return this; + } + + public Timestamp getLastActiveTime() { + return lastActiveTime; + } + + public Endpoint setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public Endpoint setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + public Endpoint setPendingState(EndpointState pendingState) { + this.pendingState = pendingState; + return this; + } + + public EndpointState getPendingState() { + return pendingState; + } + + public Endpoint setPoolerMode(EndpointPoolerMode poolerMode) { + this.poolerMode = poolerMode; + return this; + } + + public EndpointPoolerMode getPoolerMode() { + return poolerMode; + } + + public Endpoint setSettings(EndpointSettings settings) { + this.settings = settings; + return this; + } + + public EndpointSettings getSettings() { + return settings; + } + + public Endpoint setStartTime(Timestamp startTime) { + this.startTime = startTime; + return this; + } + + public Timestamp getStartTime() { + return startTime; + } + + public Endpoint setSuspendTime(Timestamp suspendTime) { + this.suspendTime = suspendTime; + return this; + } + + public Timestamp getSuspendTime() { + return suspendTime; + } + + public Endpoint setSuspendTimeoutDuration(Duration suspendTimeoutDuration) { + this.suspendTimeoutDuration = suspendTimeoutDuration; + return this; + } + + public Duration getSuspendTimeoutDuration() { + return suspendTimeoutDuration; + } + + public Endpoint setUid(String uid) { + this.uid = uid; + return this; + } + + public String getUid() { + return uid; + } + + public Endpoint setUpdateTime(Timestamp updateTime) { + this.updateTime = updateTime; + return this; + } + + public Timestamp getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
Endpoint that = (Endpoint) o; + return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu) + && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu) + && Objects.equals(createTime, that.createTime) + && Objects.equals(currentState, that.currentState) + && Objects.equals(disabled, that.disabled) + && Objects.equals(effectiveAutoscalingLimitMaxCu, that.effectiveAutoscalingLimitMaxCu) + && Objects.equals(effectiveAutoscalingLimitMinCu, that.effectiveAutoscalingLimitMinCu) + && Objects.equals(effectiveDisabled, that.effectiveDisabled) + && Objects.equals(effectivePoolerMode, that.effectivePoolerMode) + && Objects.equals(effectiveSettings, that.effectiveSettings) + && Objects.equals(effectiveSuspendTimeoutDuration, that.effectiveSuspendTimeoutDuration) + && Objects.equals(endpointType, that.endpointType) + && Objects.equals(host, that.host) + && Objects.equals(lastActiveTime, that.lastActiveTime) + && Objects.equals(name, that.name) + && Objects.equals(parent, that.parent) + && Objects.equals(pendingState, that.pendingState) + && Objects.equals(poolerMode, that.poolerMode) + && Objects.equals(settings, that.settings) + && Objects.equals(startTime, that.startTime) + && Objects.equals(suspendTime, that.suspendTime) + && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration) + && Objects.equals(uid, that.uid) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + autoscalingLimitMaxCu, + autoscalingLimitMinCu, + createTime, + currentState, + disabled, + effectiveAutoscalingLimitMaxCu, + effectiveAutoscalingLimitMinCu, + effectiveDisabled, + effectivePoolerMode, + effectiveSettings, + effectiveSuspendTimeoutDuration, + endpointType, + host, + lastActiveTime, + name, + parent, + pendingState, + poolerMode, + settings, + startTime, + suspendTime, + suspendTimeoutDuration, + uid, + updateTime); + } + + @Override + public String toString() { + return new 
ToStringer(Endpoint.class) + .add("autoscalingLimitMaxCu", autoscalingLimitMaxCu) + .add("autoscalingLimitMinCu", autoscalingLimitMinCu) + .add("createTime", createTime) + .add("currentState", currentState) + .add("disabled", disabled) + .add("effectiveAutoscalingLimitMaxCu", effectiveAutoscalingLimitMaxCu) + .add("effectiveAutoscalingLimitMinCu", effectiveAutoscalingLimitMinCu) + .add("effectiveDisabled", effectiveDisabled) + .add("effectivePoolerMode", effectivePoolerMode) + .add("effectiveSettings", effectiveSettings) + .add("effectiveSuspendTimeoutDuration", effectiveSuspendTimeoutDuration) + .add("endpointType", endpointType) + .add("host", host) + .add("lastActiveTime", lastActiveTime) + .add("name", name) + .add("parent", parent) + .add("pendingState", pendingState) + .add("poolerMode", poolerMode) + .add("settings", settings) + .add("startTime", startTime) + .add("suspendTime", suspendTime) + .add("suspendTimeoutDuration", suspendTimeoutDuration) + .add("uid", uid) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointOperationMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointOperationMetadata.java new file mode 100755 index 000000000..377a9ed7c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointOperationMetadata.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class EndpointOperationMetadata { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(EndpointOperationMetadata.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointPoolerMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointPoolerMode.java new file mode 100755 index 000000000..061d5b86f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointPoolerMode.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; + +/** The connection pooler mode. Lakebase supports PgBouncer in `transaction` mode only. */ +@Generated +public enum EndpointPoolerMode { + TRANSACTION, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSettings.java new file mode 100755 index 000000000..007a645ce --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSettings.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +/** A collection of settings for a compute endpoint. */ +@Generated +public class EndpointSettings { + /** A raw representation of Postgres settings. */ + @JsonProperty("pg_settings") + private Map pgSettings; + + /** A raw representation of PgBouncer settings. */ + @JsonProperty("pgbouncer_settings") + private Map pgbouncerSettings; + + public EndpointSettings setPgSettings(Map pgSettings) { + this.pgSettings = pgSettings; + return this; + } + + public Map getPgSettings() { + return pgSettings; + } + + public EndpointSettings setPgbouncerSettings(Map pgbouncerSettings) { + this.pgbouncerSettings = pgbouncerSettings; + return this; + } + + public Map getPgbouncerSettings() { + return pgbouncerSettings; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointSettings that = (EndpointSettings) o; + return Objects.equals(pgSettings, that.pgSettings) + && Objects.equals(pgbouncerSettings, that.pgbouncerSettings); + } + + @Override + public int hashCode() { + return Objects.hash(pgSettings, pgbouncerSettings); + } + + @Override + public String toString() { + return new ToStringer(EndpointSettings.class) + .add("pgSettings", pgSettings) + .add("pgbouncerSettings", pgbouncerSettings) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java new file mode 100755 index 000000000..b0d141670 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK 
Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; + +/** The state of the compute endpoint. */ +@Generated +public enum EndpointState { + ACTIVE, + IDLE, + INIT, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointType.java new file mode 100755 index 000000000..a548140e3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointType.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; + +/** The compute endpoint type. Either `read_write` or `read_only`. */ +@Generated +public enum EndpointType { + READ_ONLY, + READ_WRITE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ErrorCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ErrorCode.java new file mode 100755 index 000000000..c59a6d229 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ErrorCode.java @@ -0,0 +1,95 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; + +/** + * Legacy definition of the ErrorCode enum. Please keep in sync with api-base/proto/error_code.proto + * (except status code mapping annotations as this file doesn't have them). Will be removed + * eventually, pending the ScalaPB 0.4 cleanup. 
+ */ +@Generated +public enum ErrorCode { + ABORTED, + ALREADY_EXISTS, + BAD_REQUEST, + CANCELLED, + CATALOG_ALREADY_EXISTS, + CATALOG_DOES_NOT_EXIST, + CATALOG_NOT_EMPTY, + COULD_NOT_ACQUIRE_LOCK, + CUSTOMER_UNAUTHORIZED, + DAC_ALREADY_EXISTS, + DAC_DOES_NOT_EXIST, + DATA_LOSS, + DEADLINE_EXCEEDED, + DEPLOYMENT_TIMEOUT, + DIRECTORY_NOT_EMPTY, + DIRECTORY_PROTECTED, + DRY_RUN_FAILED, + ENDPOINT_NOT_FOUND, + EXTERNAL_LOCATION_ALREADY_EXISTS, + EXTERNAL_LOCATION_DOES_NOT_EXIST, + FEATURE_DISABLED, + GIT_CONFLICT, + GIT_REMOTE_ERROR, + GIT_SENSITIVE_TOKEN_DETECTED, + GIT_UNKNOWN_REF, + GIT_URL_NOT_ON_ALLOW_LIST, + INSECURE_PARTNER_RESPONSE, + INTERNAL_ERROR, + INVALID_PARAMETER_VALUE, + INVALID_STATE, + INVALID_STATE_TRANSITION, + IO_ERROR, + IPYNB_FILE_IN_REPO, + MALFORMED_PARTNER_RESPONSE, + MALFORMED_REQUEST, + MANAGED_RESOURCE_GROUP_DOES_NOT_EXIST, + MAX_BLOCK_SIZE_EXCEEDED, + MAX_CHILD_NODE_SIZE_EXCEEDED, + MAX_LIST_SIZE_EXCEEDED, + MAX_NOTEBOOK_SIZE_EXCEEDED, + MAX_READ_SIZE_EXCEEDED, + METASTORE_ALREADY_EXISTS, + METASTORE_DOES_NOT_EXIST, + METASTORE_NOT_EMPTY, + NOT_FOUND, + NOT_IMPLEMENTED, + PARTIAL_DELETE, + PERMISSION_DENIED, + PERMISSION_NOT_PROPAGATED, + PRINCIPAL_DOES_NOT_EXIST, + PROJECTS_OPERATION_TIMEOUT, + PROVIDER_ALREADY_EXISTS, + PROVIDER_DOES_NOT_EXIST, + PROVIDER_SHARE_NOT_ACCESSIBLE, + QUOTA_EXCEEDED, + RECIPIENT_ALREADY_EXISTS, + RECIPIENT_DOES_NOT_EXIST, + REQUEST_LIMIT_EXCEEDED, + RESOURCE_ALREADY_EXISTS, + RESOURCE_CONFLICT, + RESOURCE_DOES_NOT_EXIST, + RESOURCE_EXHAUSTED, + RESOURCE_LIMIT_EXCEEDED, + SCHEMA_ALREADY_EXISTS, + SCHEMA_DOES_NOT_EXIST, + SCHEMA_NOT_EMPTY, + SEARCH_QUERY_TOO_LONG, + SEARCH_QUERY_TOO_SHORT, + SERVICE_UNDER_MAINTENANCE, + SHARE_ALREADY_EXISTS, + SHARE_DOES_NOT_EXIST, + STORAGE_CREDENTIAL_ALREADY_EXISTS, + STORAGE_CREDENTIAL_DOES_NOT_EXIST, + TABLE_ALREADY_EXISTS, + TABLE_DOES_NOT_EXIST, + TEMPORARILY_UNAVAILABLE, + UNAUTHENTICATED, + UNAVAILABLE, + UNKNOWN, + UNPARSEABLE_HTTP_ERROR, + 
WORKSPACE_TEMPORARILY_UNAVAILABLE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetBranchRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetBranchRequest.java new file mode 100755 index 000000000..9b52fea00 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetBranchRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetBranchRequest { + /** The name of the Branch to retrieve. Format: projects/{project_id}/branches/{branch_id} */ + @JsonIgnore private String name; + + public GetBranchRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetBranchRequest that = (GetBranchRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetBranchRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetEndpointRequest.java new file mode 100755 index 000000000..167786afe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetEndpointRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetEndpointRequest { + /** + * The name of the Endpoint to retrieve. Format: + * projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} + */ + @JsonIgnore private String name; + + public GetEndpointRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetEndpointRequest that = (GetEndpointRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetEndpointRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetOperationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetOperationRequest.java new file mode 100755 index 000000000..38e5b869f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetOperationRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetOperationRequest { + /** The name of the operation resource. 
*/ + @JsonIgnore private String name; + + public GetOperationRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetOperationRequest that = (GetOperationRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetOperationRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetProjectRequest.java new file mode 100755 index 000000000..540c715a2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetProjectRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetProjectRequest { + /** The name of the Project to retrieve. 
Format: projects/{project_id} */ + @JsonIgnore private String name; + + public GetProjectRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetProjectRequest that = (GetProjectRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetProjectRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListBranchesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListBranchesRequest.java new file mode 100755 index 000000000..8737561cd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListBranchesRequest.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListBranchesRequest { + /** Upper bound for items returned. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** Pagination token to go to the next page of Branches. Requests first page if absent. */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** The Project that owns this collection of branches. 
Format: projects/{project_id} */ + @JsonIgnore private String parent; + + public ListBranchesRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListBranchesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListBranchesRequest setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListBranchesRequest that = (ListBranchesRequest) o; + return Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(parent, that.parent); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken, parent); + } + + @Override + public String toString() { + return new ToStringer(ListBranchesRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("parent", parent) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListBranchesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListBranchesResponse.java new file mode 100755 index 000000000..72cce9a10 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListBranchesResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListBranchesResponse { + /** List of branches. 
*/ + @JsonProperty("branches") + private Collection branches; + + /** Pagination token to request the next page of branches. */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListBranchesResponse setBranches(Collection branches) { + this.branches = branches; + return this; + } + + public Collection getBranches() { + return branches; + } + + public ListBranchesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListBranchesResponse that = (ListBranchesResponse) o; + return Objects.equals(branches, that.branches) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(branches, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListBranchesResponse.class) + .add("branches", branches) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListEndpointsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListEndpointsRequest.java new file mode 100755 index 000000000..aafa0fb8c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListEndpointsRequest.java @@ -0,0 +1,79 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListEndpointsRequest { + /** Upper bound for items returned. 
*/ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** Pagination token to go to the next page of Endpoints. Requests first page if absent. */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** + * The Branch that owns this collection of endpoints. Format: + * projects/{project_id}/branches/{branch_id} + */ + @JsonIgnore private String parent; + + public ListEndpointsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListEndpointsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListEndpointsRequest setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListEndpointsRequest that = (ListEndpointsRequest) o; + return Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(parent, that.parent); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken, parent); + } + + @Override + public String toString() { + return new ToStringer(ListEndpointsRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("parent", parent) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListEndpointsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListEndpointsResponse.java new file mode 100755 index 000000000..e5d5650b6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListEndpointsResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListEndpointsResponse { + /** List of endpoints. */ + @JsonProperty("endpoints") + private Collection endpoints; + + /** Pagination token to request the next page of endpoints. */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListEndpointsResponse setEndpoints(Collection endpoints) { + this.endpoints = endpoints; + return this; + } + + public Collection getEndpoints() { + return endpoints; + } + + public ListEndpointsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListEndpointsResponse that = (ListEndpointsResponse) o; + return Objects.equals(endpoints, that.endpoints) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(endpoints, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListEndpointsResponse.class) + .add("endpoints", endpoints) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsRequest.java new file mode 100755 index 000000000..51952a99c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsRequest.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListProjectsRequest { + /** Upper bound for items returned. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** Pagination token to go to the next page of Projects. Requests first page if absent. */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListProjectsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListProjectsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProjectsRequest that = (ListProjectsRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListProjectsRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsResponse.java new file mode 100755 index 000000000..ab9f15857 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ListProjectsResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListProjectsResponse { + /** Pagination token to request the next page of projects. */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** List of projects. */ + @JsonProperty("projects") + private Collection projects; + + public ListProjectsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListProjectsResponse setProjects(Collection projects) { + this.projects = projects; + return this; + } + + public Collection getProjects() { + return projects; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProjectsResponse that = (ListProjectsResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(projects, that.projects); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, projects); + } + + @Override + public String toString() { + return new ToStringer(ListProjectsResponse.class) + .add("nextPageToken", nextPageToken) + .add("projects", projects) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Operation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Operation.java new file mode 100755 index 000000000..c2cd06544 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Operation.java @@ -0,0 +1,116 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** This resource represents a long-running operation that is the result of a network API call. */ +@Generated +public class Operation { + /** + * If the value is `false`, it means the operation is still in progress. If `true`, the operation + * is completed, and either `error` or `response` is available. + */ + @JsonProperty("done") + private Boolean done; + + /** The error result of the operation in case of failure or cancellation. */ + @JsonProperty("error") + private DatabricksServiceExceptionWithDetailsProto error; + + /** + * Service-specific metadata associated with the operation. It typically contains progress + * information and common metadata such as create time. Some services might not provide such + * metadata. + */ + @JsonProperty("metadata") + private Object metadata; + + /** + * The server-assigned name, which is only unique within the same service that originally returns + * it. If you use the default HTTP mapping, the `name` should be a resource name ending with + * `operations/{unique_id}`. + */ + @JsonProperty("name") + private String name; + + /** The normal, successful response of the operation. 
*/ + @JsonProperty("response") + private Object response; + + public Operation setDone(Boolean done) { + this.done = done; + return this; + } + + public Boolean getDone() { + return done; + } + + public Operation setError(DatabricksServiceExceptionWithDetailsProto error) { + this.error = error; + return this; + } + + public DatabricksServiceExceptionWithDetailsProto getError() { + return error; + } + + public Operation setMetadata(Object metadata) { + this.metadata = metadata; + return this; + } + + public Object getMetadata() { + return metadata; + } + + public Operation setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public Operation setResponse(Object response) { + this.response = response; + return this; + } + + public Object getResponse() { + return response; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Operation that = (Operation) o; + return Objects.equals(done, that.done) + && Objects.equals(error, that.error) + && Objects.equals(metadata, that.metadata) + && Objects.equals(name, that.name) + && Objects.equals(response, that.response); + } + + @Override + public int hashCode() { + return Objects.hash(done, error, metadata, name, response); + } + + @Override + public String toString() { + return new ToStringer(Operation.class) + .add("done", done) + .add("error", error) + .add("metadata", metadata) + .add("name", name) + .add("response", response) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresAPI.java new file mode 100755 index 000000000..b2fe70422 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresAPI.java @@ -0,0 +1,182 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. 
DO NOT EDIT. +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** The Postgres API provides access to a Postgres database via REST API or direct SQL. */ +@Generated +public class PostgresAPI { + private static final Logger LOG = LoggerFactory.getLogger(PostgresAPI.class); + + private final PostgresService impl; + + /** Regular-use constructor */ + public PostgresAPI(ApiClient apiClient) { + impl = new PostgresImpl(apiClient); + } + + /** Constructor for mocks */ + public PostgresAPI(PostgresService mock) { + impl = mock; + } + + /** Create a Branch. */ + public CreateBranchOperation createBranch(CreateBranchRequest request) { + Operation operation = impl.createBranch(request); + return new CreateBranchOperation(impl, operation); + } + + /** Create an Endpoint. */ + public CreateEndpointOperation createEndpoint(CreateEndpointRequest request) { + Operation operation = impl.createEndpoint(request); + return new CreateEndpointOperation(impl, operation); + } + + /** Create a Project. */ + public CreateProjectOperation createProject(CreateProjectRequest request) { + Operation operation = impl.createProject(request); + return new CreateProjectOperation(impl, operation); + } + + public void deleteBranch(String name) { + deleteBranch(new DeleteBranchRequest().setName(name)); + } + + /** Delete a Branch. */ + public void deleteBranch(DeleteBranchRequest request) { + impl.deleteBranch(request); + } + + public void deleteEndpoint(String name) { + deleteEndpoint(new DeleteEndpointRequest().setName(name)); + } + + /** Delete an Endpoint. */ + public void deleteEndpoint(DeleteEndpointRequest request) { + impl.deleteEndpoint(request); + } + + public void deleteProject(String name) { + deleteProject(new DeleteProjectRequest().setName(name)); + } + + /** Delete a Project. 
*/ + public void deleteProject(DeleteProjectRequest request) { + impl.deleteProject(request); + } + + public Branch getBranch(String name) { + return getBranch(new GetBranchRequest().setName(name)); + } + + /** Get a Branch. */ + public Branch getBranch(GetBranchRequest request) { + return impl.getBranch(request); + } + + public Endpoint getEndpoint(String name) { + return getEndpoint(new GetEndpointRequest().setName(name)); + } + + /** Get an Endpoint. */ + public Endpoint getEndpoint(GetEndpointRequest request) { + return impl.getEndpoint(request); + } + + public Operation getOperation(String name) { + return getOperation(new GetOperationRequest().setName(name)); + } + + /** Get an Operation. */ + public Operation getOperation(GetOperationRequest request) { + return impl.getOperation(request); + } + + public Project getProject(String name) { + return getProject(new GetProjectRequest().setName(name)); + } + + /** Get a Project. */ + public Project getProject(GetProjectRequest request) { + return impl.getProject(request); + } + + public Iterable listBranches(String parent) { + return listBranches(new ListBranchesRequest().setParent(parent)); + } + + /** List Branches. */ + public Iterable listBranches(ListBranchesRequest request) { + return new Paginator<>( + request, + impl::listBranches, + ListBranchesResponse::getBranches, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public Iterable listEndpoints(String parent) { + return listEndpoints(new ListEndpointsRequest().setParent(parent)); + } + + /** List Endpoints. 
*/ + public Iterable listEndpoints(ListEndpointsRequest request) { + return new Paginator<>( + request, + impl::listEndpoints, + ListEndpointsResponse::getEndpoints, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + /** List Projects. */ + public Iterable listProjects(ListProjectsRequest request) { + return new Paginator<>( + request, + impl::listProjects, + ListProjectsResponse::getProjects, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + /** Update a Branch. */ + public UpdateBranchOperation updateBranch(UpdateBranchRequest request) { + Operation operation = impl.updateBranch(request); + return new UpdateBranchOperation(impl, operation); + } + + /** Update an Endpoint. */ + public UpdateEndpointOperation updateEndpoint(UpdateEndpointRequest request) { + Operation operation = impl.updateEndpoint(request); + return new UpdateEndpointOperation(impl, operation); + } + + /** Update a Project. */ + public UpdateProjectOperation updateProject(UpdateProjectRequest request) { + Operation operation = impl.updateProject(request); + return new UpdateProjectOperation(impl, operation); + } + + public PostgresService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresImpl.java new file mode 100755 index 000000000..fe18af789 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresImpl.java @@ -0,0 +1,248 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of Postgres */ +@Generated +class PostgresImpl implements PostgresService { + private final ApiClient apiClient; + + public PostgresImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public Operation createBranch(CreateBranchRequest request) { + String path = String.format("/api/2.0/postgres/%s/branches", request.getParent()); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getBranch())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Operation createEndpoint(CreateEndpointRequest request) { + String path = String.format("/api/2.0/postgres/%s/endpoints", request.getParent()); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getEndpoint())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Operation createProject(CreateProjectRequest request) { + String path = "/api/2.0/postgres/projects"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getProject())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, 
Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteBranch(DeleteBranchRequest request) { + String path = String.format("/api/2.0/postgres/%s", request.getName()); + try { + Request req = new Request("DELETE", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteEndpoint(DeleteEndpointRequest request) { + String path = String.format("/api/2.0/postgres/%s", request.getName()); + try { + Request req = new Request("DELETE", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteProject(DeleteProjectRequest request) { + String path = String.format("/api/2.0/postgres/%s", request.getName()); + try { + Request req = new Request("DELETE", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Branch getBranch(GetBranchRequest request) { + String path = String.format("/api/2.0/postgres/%s", request.getName()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Branch.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Endpoint getEndpoint(GetEndpointRequest request) { + String path = String.format("/api/2.0/postgres/%s", request.getName()); + try { + Request 
req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Endpoint.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Operation getOperation(GetOperationRequest request) { + String path = String.format("/api/2.0/postgres/%s", request.getName()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Project getProject(GetProjectRequest request) { + String path = String.format("/api/2.0/postgres/%s", request.getName()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Project.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListBranchesResponse listBranches(ListBranchesRequest request) { + String path = String.format("/api/2.0/postgres/%s/branches", request.getParent()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListBranchesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListEndpointsResponse listEndpoints(ListEndpointsRequest request) { + String path = String.format("/api/2.0/postgres/%s/endpoints", request.getParent()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListEndpointsResponse.class); + } catch 
(IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListProjectsResponse listProjects(ListProjectsRequest request) { + String path = "/api/2.0/postgres/projects"; + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListProjectsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Operation updateBranch(UpdateBranchRequest request) { + String path = String.format("/api/2.0/postgres/%s", request.getName()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getBranch())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Operation updateEndpoint(UpdateEndpointRequest request) { + String path = String.format("/api/2.0/postgres/%s", request.getName()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getEndpoint())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Operation updateProject(UpdateProjectRequest request) { + String path = String.format("/api/2.0/postgres/%s", request.getName()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getProject())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + 
return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresService.java new file mode 100755 index 000000000..871532e59 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/PostgresService.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; + +/** + * The Postgres API provides access to a Postgres database via REST API or direct SQL. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface PostgresService { + /** Create a Branch. */ + Operation createBranch(CreateBranchRequest createBranchRequest); + + /** Create an Endpoint. */ + Operation createEndpoint(CreateEndpointRequest createEndpointRequest); + + /** Create a Project. */ + Operation createProject(CreateProjectRequest createProjectRequest); + + /** Delete a Branch. */ + void deleteBranch(DeleteBranchRequest deleteBranchRequest); + + /** Delete an Endpoint. */ + void deleteEndpoint(DeleteEndpointRequest deleteEndpointRequest); + + /** Delete a Project. */ + void deleteProject(DeleteProjectRequest deleteProjectRequest); + + /** Get a Branch. */ + Branch getBranch(GetBranchRequest getBranchRequest); + + /** Get an Endpoint. */ + Endpoint getEndpoint(GetEndpointRequest getEndpointRequest); + + /** Get an Operation. */ + Operation getOperation(GetOperationRequest getOperationRequest); + + /** Get a Project. */ + Project getProject(GetProjectRequest getProjectRequest); + + /** List Branches. */ + ListBranchesResponse listBranches(ListBranchesRequest listBranchesRequest); + + /** List Endpoints. */ + ListEndpointsResponse listEndpoints(ListEndpointsRequest listEndpointsRequest); + + /** List Projects. */ + ListProjectsResponse listProjects(ListProjectsRequest listProjectsRequest); + + /** Update a Branch. */ + Operation updateBranch(UpdateBranchRequest updateBranchRequest); + + /** Update an Endpoint. */ + Operation updateEndpoint(UpdateEndpointRequest updateEndpointRequest); + + /** Update a Project. 
/**
 * A Postgres project resource (generated model). Plain data holder: Jackson-annotated fields,
 * fluent setters that return {@code this}, and value-based equals/hashCode/toString computed over
 * all fields.
 *
 * <p>NOTE(review): generated file ("DO NOT EDIT") — comments only added here; code tokens are
 * unchanged from the generator's output.
 */
@Generated
public class Project {
  /** The logical size limit for a branch. */
  @JsonProperty("branch_logical_size_limit_bytes")
  private Long branchLogicalSizeLimitBytes;

  /** The most recent time when any endpoint of this project was active. */
  @JsonProperty("compute_last_active_time")
  private Timestamp computeLastActiveTime;

  /** A timestamp indicating when the project was created. */
  @JsonProperty("create_time")
  private Timestamp createTime;

  /** Default endpoint settings for this project (see {@link ProjectDefaultEndpointSettings}). */
  @JsonProperty("default_endpoint_settings")
  private ProjectDefaultEndpointSettings defaultEndpointSettings;

  /** Human-readable project name. */
  @JsonProperty("display_name")
  private String displayName;

  /** Presumably the server-resolved value of {@code defaultEndpointSettings} — TODO confirm. */
  @JsonProperty("effective_default_endpoint_settings")
  private ProjectDefaultEndpointSettings effectiveDefaultEndpointSettings;

  /** Presumably the server-resolved value of {@code displayName} — TODO confirm. */
  @JsonProperty("effective_display_name")
  private String effectiveDisplayName;

  /** Presumably the server-resolved value of {@code historyRetentionDuration} — TODO confirm. */
  @JsonProperty("effective_history_retention_duration")
  private Duration effectiveHistoryRetentionDuration;

  /** Presumably the server-resolved value of {@code pgVersion} — TODO confirm. */
  @JsonProperty("effective_pg_version")
  private Long effectivePgVersion;

  /** Presumably the server-resolved value of {@code settings} — TODO confirm. */
  @JsonProperty("effective_settings")
  private ProjectSettings effectiveSettings;

  /**
   * The number of seconds to retain the shared history for point in time recovery for all branches
   * in this project.
   */
  @JsonProperty("history_retention_duration")
  private Duration historyRetentionDuration;

  /** The resource name of the project. Format: projects/{project_id} */
  @JsonProperty("name")
  private String name;

  /** The major Postgres version number. */
  @JsonProperty("pg_version")
  private Long pgVersion;

  /** Project-wide settings (see {@link ProjectSettings}). */
  @JsonProperty("settings")
  private ProjectSettings settings;

  /**
   * The current space occupied by the project in storage. Synthetic storage size combines the
   * logical data size and Write-Ahead Log (WAL) size for all branches in a project.
   */
  @JsonProperty("synthetic_storage_size_bytes")
  private Long syntheticStorageSizeBytes;

  /** System generated unique ID for the project. */
  @JsonProperty("uid")
  private String uid;

  /** A timestamp indicating when the project was last updated. */
  @JsonProperty("update_time")
  private Timestamp updateTime;

  public Project setBranchLogicalSizeLimitBytes(Long branchLogicalSizeLimitBytes) {
    this.branchLogicalSizeLimitBytes = branchLogicalSizeLimitBytes;
    return this;
  }

  public Long getBranchLogicalSizeLimitBytes() {
    return branchLogicalSizeLimitBytes;
  }

  public Project setComputeLastActiveTime(Timestamp computeLastActiveTime) {
    this.computeLastActiveTime = computeLastActiveTime;
    return this;
  }

  public Timestamp getComputeLastActiveTime() {
    return computeLastActiveTime;
  }

  public Project setCreateTime(Timestamp createTime) {
    this.createTime = createTime;
    return this;
  }

  public Timestamp getCreateTime() {
    return createTime;
  }

  public Project setDefaultEndpointSettings(
      ProjectDefaultEndpointSettings defaultEndpointSettings) {
    this.defaultEndpointSettings = defaultEndpointSettings;
    return this;
  }

  public ProjectDefaultEndpointSettings getDefaultEndpointSettings() {
    return defaultEndpointSettings;
  }

  public Project setDisplayName(String displayName) {
    this.displayName = displayName;
    return this;
  }

  public String getDisplayName() {
    return displayName;
  }

  public Project setEffectiveDefaultEndpointSettings(
      ProjectDefaultEndpointSettings effectiveDefaultEndpointSettings) {
    this.effectiveDefaultEndpointSettings = effectiveDefaultEndpointSettings;
    return this;
  }

  public ProjectDefaultEndpointSettings getEffectiveDefaultEndpointSettings() {
    return effectiveDefaultEndpointSettings;
  }

  public Project setEffectiveDisplayName(String effectiveDisplayName) {
    this.effectiveDisplayName = effectiveDisplayName;
    return this;
  }

  public String getEffectiveDisplayName() {
    return effectiveDisplayName;
  }

  public Project setEffectiveHistoryRetentionDuration(Duration effectiveHistoryRetentionDuration) {
    this.effectiveHistoryRetentionDuration = effectiveHistoryRetentionDuration;
    return this;
  }

  public Duration getEffectiveHistoryRetentionDuration() {
    return effectiveHistoryRetentionDuration;
  }

  public Project setEffectivePgVersion(Long effectivePgVersion) {
    this.effectivePgVersion = effectivePgVersion;
    return this;
  }

  public Long getEffectivePgVersion() {
    return effectivePgVersion;
  }

  public Project setEffectiveSettings(ProjectSettings effectiveSettings) {
    this.effectiveSettings = effectiveSettings;
    return this;
  }

  public ProjectSettings getEffectiveSettings() {
    return effectiveSettings;
  }

  public Project setHistoryRetentionDuration(Duration historyRetentionDuration) {
    this.historyRetentionDuration = historyRetentionDuration;
    return this;
  }

  public Duration getHistoryRetentionDuration() {
    return historyRetentionDuration;
  }

  public Project setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  public Project setPgVersion(Long pgVersion) {
    this.pgVersion = pgVersion;
    return this;
  }

  public Long getPgVersion() {
    return pgVersion;
  }

  public Project setSettings(ProjectSettings settings) {
    this.settings = settings;
    return this;
  }

  public ProjectSettings getSettings() {
    return settings;
  }

  public Project setSyntheticStorageSizeBytes(Long syntheticStorageSizeBytes) {
    this.syntheticStorageSizeBytes = syntheticStorageSizeBytes;
    return this;
  }

  public Long getSyntheticStorageSizeBytes() {
    return syntheticStorageSizeBytes;
  }

  public Project setUid(String uid) {
    this.uid = uid;
    return this;
  }

  public String getUid() {
    return uid;
  }

  public Project setUpdateTime(Timestamp updateTime) {
    this.updateTime = updateTime;
    return this;
  }

  public Timestamp getUpdateTime() {
    return updateTime;
  }

  // Value equality over every field; kept in sync with hashCode below (generated).
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    Project that = (Project) o;
    return Objects.equals(branchLogicalSizeLimitBytes, that.branchLogicalSizeLimitBytes)
        && Objects.equals(computeLastActiveTime, that.computeLastActiveTime)
        && Objects.equals(createTime, that.createTime)
        && Objects.equals(defaultEndpointSettings, that.defaultEndpointSettings)
        && Objects.equals(displayName, that.displayName)
        && Objects.equals(effectiveDefaultEndpointSettings, that.effectiveDefaultEndpointSettings)
        && Objects.equals(effectiveDisplayName, that.effectiveDisplayName)
        && Objects.equals(effectiveHistoryRetentionDuration, that.effectiveHistoryRetentionDuration)
        && Objects.equals(effectivePgVersion, that.effectivePgVersion)
        && Objects.equals(effectiveSettings, that.effectiveSettings)
        && Objects.equals(historyRetentionDuration, that.historyRetentionDuration)
        && Objects.equals(name, that.name)
        && Objects.equals(pgVersion, that.pgVersion)
        && Objects.equals(settings, that.settings)
        && Objects.equals(syntheticStorageSizeBytes, that.syntheticStorageSizeBytes)
        && Objects.equals(uid, that.uid)
        && Objects.equals(updateTime, that.updateTime);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        branchLogicalSizeLimitBytes,
        computeLastActiveTime,
        createTime,
        defaultEndpointSettings,
        displayName,
        effectiveDefaultEndpointSettings,
        effectiveDisplayName,
        effectiveHistoryRetentionDuration,
        effectivePgVersion,
        effectiveSettings,
        historyRetentionDuration,
        name,
        pgVersion,
        settings,
        syntheticStorageSizeBytes,
        uid,
        updateTime);
  }

  @Override
  public String toString() {
    return new ToStringer(Project.class)
        .add("branchLogicalSizeLimitBytes", branchLogicalSizeLimitBytes)
        .add("computeLastActiveTime", computeLastActiveTime)
        .add("createTime", createTime)
        .add("defaultEndpointSettings", defaultEndpointSettings)
        .add("displayName", displayName)
        .add("effectiveDefaultEndpointSettings", effectiveDefaultEndpointSettings)
        .add("effectiveDisplayName", effectiveDisplayName)
        .add("effectiveHistoryRetentionDuration", effectiveHistoryRetentionDuration)
        .add("effectivePgVersion", effectivePgVersion)
        .add("effectiveSettings", effectiveSettings)
        .add("historyRetentionDuration", historyRetentionDuration)
        .add("name", name)
        .add("pgVersion", pgVersion)
        .add("settings", settings)
        .add("syntheticStorageSizeBytes", syntheticStorageSizeBytes)
        .add("uid", uid)
        .add("updateTime", updateTime)
        .toString();
  }
}
/**
 * A collection of settings for a compute endpoint. Generated data holder with fluent setters and
 * value-based equals/hashCode/toString.
 *
 * <p>NOTE(review): the {@code Map} fields appear with raw types in this extraction — the generic
 * type arguments (likely {@code Map&lt;String, String&gt;}) seem to have been stripped; confirm
 * against the regenerated source.
 */
@Generated
public class ProjectDefaultEndpointSettings {
  /** The maximum number of Compute Units. */
  @JsonProperty("autoscaling_limit_max_cu")
  private Double autoscalingLimitMaxCu;

  /** The minimum number of Compute Units. */
  @JsonProperty("autoscaling_limit_min_cu")
  private Double autoscalingLimitMinCu;

  /** A raw representation of Postgres settings. */
  @JsonProperty("pg_settings")
  private Map pgSettings;

  /** A raw representation of PgBouncer settings. */
  @JsonProperty("pgbouncer_settings")
  private Map pgbouncerSettings;

  /** Duration of inactivity after which the compute endpoint is automatically suspended. */
  @JsonProperty("suspend_timeout_duration")
  private Duration suspendTimeoutDuration;

  public ProjectDefaultEndpointSettings setAutoscalingLimitMaxCu(Double autoscalingLimitMaxCu) {
    this.autoscalingLimitMaxCu = autoscalingLimitMaxCu;
    return this;
  }

  public Double getAutoscalingLimitMaxCu() {
    return autoscalingLimitMaxCu;
  }

  public ProjectDefaultEndpointSettings setAutoscalingLimitMinCu(Double autoscalingLimitMinCu) {
    this.autoscalingLimitMinCu = autoscalingLimitMinCu;
    return this;
  }

  public Double getAutoscalingLimitMinCu() {
    return autoscalingLimitMinCu;
  }

  public ProjectDefaultEndpointSettings setPgSettings(Map pgSettings) {
    this.pgSettings = pgSettings;
    return this;
  }

  public Map getPgSettings() {
    return pgSettings;
  }

  public ProjectDefaultEndpointSettings setPgbouncerSettings(
      Map pgbouncerSettings) {
    this.pgbouncerSettings = pgbouncerSettings;
    return this;
  }

  public Map getPgbouncerSettings() {
    return pgbouncerSettings;
  }

  public ProjectDefaultEndpointSettings setSuspendTimeoutDuration(Duration suspendTimeoutDuration) {
    this.suspendTimeoutDuration = suspendTimeoutDuration;
    return this;
  }

  public Duration getSuspendTimeoutDuration() {
    return suspendTimeoutDuration;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ProjectDefaultEndpointSettings that = (ProjectDefaultEndpointSettings) o;
    return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu)
        && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu)
        && Objects.equals(pgSettings, that.pgSettings)
        && Objects.equals(pgbouncerSettings, that.pgbouncerSettings)
        && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        autoscalingLimitMaxCu,
        autoscalingLimitMinCu,
        pgSettings,
        pgbouncerSettings,
        suspendTimeoutDuration);
  }

  @Override
  public String toString() {
    return new ToStringer(ProjectDefaultEndpointSettings.class)
        .add("autoscalingLimitMaxCu", autoscalingLimitMaxCu)
        .add("autoscalingLimitMinCu", autoscalingLimitMinCu)
        .add("pgSettings", pgSettings)
        .add("pgbouncerSettings", pgbouncerSettings)
        .add("suspendTimeoutDuration", suspendTimeoutDuration)
        .toString();
  }
}
/**
 * Metadata payload for long-running project operations. Currently carries no fields, so all
 * instances compare equal and share the same hash code; it exists as a typed placeholder that can
 * gain fields in future generator runs without breaking callers.
 */
@Generated
public class ProjectOperationMetadata {

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    return true;
  }

  @Override
  public int hashCode() {
    // No fields: Objects.hash() of an empty argument list is a constant for all instances.
    return Objects.hash();
  }

  @Override
  public String toString() {
    return new ToStringer(ProjectOperationMetadata.class).toString();
  }
}
/** Project-wide Postgres settings. Generated data holder with a single optional flag. */
@Generated
public class ProjectSettings {
  /**
   * Sets wal_level=logical for all compute endpoints in this project. All active endpoints will be
   * suspended. Once enabled, logical replication cannot be disabled.
   */
  @JsonProperty("enable_logical_replication")
  private Boolean enableLogicalReplication;

  public ProjectSettings setEnableLogicalReplication(Boolean enableLogicalReplication) {
    this.enableLogicalReplication = enableLogicalReplication;
    return this;
  }

  public Boolean getEnableLogicalReplication() {
    return enableLogicalReplication;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ProjectSettings that = (ProjectSettings) o;
    return Objects.equals(enableLogicalReplication, that.enableLogicalReplication);
  }

  @Override
  public int hashCode() {
    return Objects.hash(enableLogicalReplication);
  }

  @Override
  public String toString() {
    return new ToStringer(ProjectSettings.class)
        .add("enableLogicalReplication", enableLogicalReplication)
        .toString();
  }
}
/**
 * Wrapper for interacting with a long-running updateBranch operation. Provides methods to wait for
 * completion, check status, and access metadata.
 *
 * <p>NOTE(review): the generated summary also mentioned "cancel", but no cancel method is exposed
 * by this wrapper — removed from the description to match the actual surface.
 *
 * <p>NOTE(review): {@code Optional} generic arguments appear stripped by extraction; restored as
 * {@code Optional&lt;LroOptions&gt;} / {@code Optional&lt;Duration&gt;} — confirm against the
 * regenerated source.
 */
@Generated
public class UpdateBranchOperation {
  private static final Logger LOG = LoggerFactory.getLogger(UpdateBranchOperation.class);

  // Service used to poll operation state; `operation` always holds the latest server snapshot.
  private final PostgresService impl;
  private Operation operation;
  private final ObjectMapper objectMapper;

  public UpdateBranchOperation(PostgresService impl, Operation operation) {
    this.impl = impl;
    this.operation = operation;
    this.objectMapper = SerDeUtils.createMapper();
  }

  /**
   * Wait for the operation to complete and return the resulting Branch. Waits indefinitely if no
   * timeout is specified.
   *
   * @return the created Branch
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public Branch waitForCompletion() throws TimeoutException {
    return waitForCompletion(Optional.empty());
  }

  /**
   * Wait for the operation to complete and return the resulting Branch.
   *
   * @param options the options for configuring the wait behavior, can be empty for defaults
   * @return the created Branch
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public Branch waitForCompletion(Optional<LroOptions> options) throws TimeoutException {
    Optional<Duration> timeout = options.flatMap(LroOptions::getTimeout);
    // No timeout configured => deadline of Long.MAX_VALUE, i.e. effectively wait forever.
    long deadline =
        timeout.isPresent()
            ? System.currentTimeMillis() + timeout.get().toMillis()
            : Long.MAX_VALUE;
    // Fixed placeholder; only surfaced in the TimeoutException message below.
    String statusMessage = "polling operation...";
    int attempt = 1;

    while (System.currentTimeMillis() < deadline) {
      // Refresh the operation state
      refreshOperation();

      if (operation.getDone() != null && operation.getDone()) {
        // Operation completed, check for success or failure
        if (operation.getError() != null) {
          String errorMsg = "unknown error";
          if (operation.getError().getMessage() != null
              && !operation.getError().getMessage().isEmpty()) {
            errorMsg = operation.getError().getMessage();
          }

          if (operation.getError().getErrorCode() != null) {
            errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg);
          }

          throw new DatabricksException("Operation failed: " + errorMsg);
        }

        // Operation completed successfully, unmarshal response
        if (operation.getResponse() == null) {
          throw new DatabricksException("Operation completed but no response available");
        }

        try {
          JsonNode responseJson = objectMapper.valueToTree(operation.getResponse());
          return objectMapper.treeToValue(responseJson, Branch.class);
        } catch (JsonProcessingException e) {
          throw new DatabricksException(
              "Failed to unmarshal branch response: " + e.getMessage(), e);
        }
      }

      // Operation still in progress, wait before polling again.
      // Backoff grows linearly with the attempt count, capped at 10s, plus up to 1s of jitter.
      String prefix = String.format("operation=%s", operation.getName());
      int sleep = Math.min(attempt, 10); // sleep 10s max per attempt
      LOG.info("{}: operation in progress (sleeping ~{}s)", prefix, sleep);

      try {
        Thread.sleep((long) (sleep * 1000L + Math.random() * 1000));
      } catch (InterruptedException e) {
        // Preserve the interrupt flag for callers before surfacing the failure.
        Thread.currentThread().interrupt();
        throw new DatabricksException("Current thread was interrupted", e);
      }
      attempt++;
    }

    String timeoutMessage =
        timeout.isPresent()
            ? String.format("Operation timed out after %s: %s", timeout.get(), statusMessage)
            : String.format("Operation timed out: %s", statusMessage);
    throw new TimeoutException(timeoutMessage);
  }

  /**
   * Get the operation name.
   *
   * @return the operation name
   */
  public String getName() {
    return operation.getName();
  }

  /**
   * Get the operation metadata.
   *
   * @return the operation metadata, or null if not available
   * @throws DatabricksException if the metadata cannot be deserialized
   */
  public BranchOperationMetadata getMetadata() {
    if (operation.getMetadata() == null) {
      return null;
    }

    try {
      JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata());
      return objectMapper.treeToValue(metadataJson, BranchOperationMetadata.class);
    } catch (JsonProcessingException e) {
      throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e);
    }
  }

  /**
   * Check if the operation is done. This method refreshes the operation state before checking.
   *
   * @return true if the operation is complete, false otherwise
   * @throws DatabricksException if the status check fails
   */
  public boolean isDone() {
    refreshOperation();
    return operation.getDone() != null && operation.getDone();
  }

  /** Refresh the operation state by polling the server. */
  private void refreshOperation() {
    operation = impl.getOperation(new GetOperationRequest().setName(operation.getName()));
  }
}

/**
 * Request to update a branch. Generated data holder; {@code name} is bound to the URL path and
 * {@code updateMask} to a query parameter, so only {@code branch} is serialized into the body.
 */
@Generated
public class UpdateBranchRequest {
  /**
   * The Branch to update.
   *
   * <p>The branch's `name` field is used to identify the branch to update. Format:
   * projects/{project_id}/branches/{branch_id}
   */
  @JsonProperty("branch")
  private Branch branch;

  /** The resource name of the branch. Format: projects/{project_id}/branches/{branch_id} */
  @JsonIgnore private String name;

  /** The list of fields to update. If unspecified, all fields will be updated when possible. */
  @JsonIgnore
  @QueryParam("update_mask")
  private FieldMask updateMask;

  public UpdateBranchRequest setBranch(Branch branch) {
    this.branch = branch;
    return this;
  }

  public Branch getBranch() {
    return branch;
  }

  public UpdateBranchRequest setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  public UpdateBranchRequest setUpdateMask(FieldMask updateMask) {
    this.updateMask = updateMask;
    return this;
  }

  public FieldMask getUpdateMask() {
    return updateMask;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    UpdateBranchRequest that = (UpdateBranchRequest) o;
    return Objects.equals(branch, that.branch)
        && Objects.equals(name, that.name)
        && Objects.equals(updateMask, that.updateMask);
  }

  @Override
  public int hashCode() {
    return Objects.hash(branch, name, updateMask);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateBranchRequest.class)
        .add("branch", branch)
        .add("name", name)
        .add("updateMask", updateMask)
        .toString();
  }
}
/**
 * Wrapper for interacting with a long-running updateEndpoint operation. Provides methods to wait
 * for completion, check status, and access metadata.
 *
 * <p>NOTE(review): the generated summary also mentioned "cancel", but no cancel method is exposed
 * by this wrapper — removed from the description to match the actual surface.
 *
 * <p>NOTE(review): {@code Optional} generic arguments appear stripped by extraction; restored as
 * {@code Optional&lt;LroOptions&gt;} / {@code Optional&lt;Duration&gt;} — confirm against the
 * regenerated source.
 */
@Generated
public class UpdateEndpointOperation {
  private static final Logger LOG = LoggerFactory.getLogger(UpdateEndpointOperation.class);

  // Service used to poll operation state; `operation` always holds the latest server snapshot.
  private final PostgresService impl;
  private Operation operation;
  private final ObjectMapper objectMapper;

  public UpdateEndpointOperation(PostgresService impl, Operation operation) {
    this.impl = impl;
    this.operation = operation;
    this.objectMapper = SerDeUtils.createMapper();
  }

  /**
   * Wait for the operation to complete and return the resulting Endpoint. Waits indefinitely if no
   * timeout is specified.
   *
   * @return the created Endpoint
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public Endpoint waitForCompletion() throws TimeoutException {
    return waitForCompletion(Optional.empty());
  }

  /**
   * Wait for the operation to complete and return the resulting Endpoint.
   *
   * @param options the options for configuring the wait behavior, can be empty for defaults
   * @return the created Endpoint
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public Endpoint waitForCompletion(Optional<LroOptions> options) throws TimeoutException {
    Optional<Duration> timeout = options.flatMap(LroOptions::getTimeout);
    // No timeout configured => deadline of Long.MAX_VALUE, i.e. effectively wait forever.
    long deadline =
        timeout.isPresent()
            ? System.currentTimeMillis() + timeout.get().toMillis()
            : Long.MAX_VALUE;
    // Fixed placeholder; only surfaced in the TimeoutException message below.
    String statusMessage = "polling operation...";
    int attempt = 1;

    while (System.currentTimeMillis() < deadline) {
      // Refresh the operation state
      refreshOperation();

      if (operation.getDone() != null && operation.getDone()) {
        // Operation completed, check for success or failure
        if (operation.getError() != null) {
          String errorMsg = "unknown error";
          if (operation.getError().getMessage() != null
              && !operation.getError().getMessage().isEmpty()) {
            errorMsg = operation.getError().getMessage();
          }

          if (operation.getError().getErrorCode() != null) {
            errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg);
          }

          throw new DatabricksException("Operation failed: " + errorMsg);
        }

        // Operation completed successfully, unmarshal response
        if (operation.getResponse() == null) {
          throw new DatabricksException("Operation completed but no response available");
        }

        try {
          JsonNode responseJson = objectMapper.valueToTree(operation.getResponse());
          return objectMapper.treeToValue(responseJson, Endpoint.class);
        } catch (JsonProcessingException e) {
          throw new DatabricksException(
              "Failed to unmarshal endpoint response: " + e.getMessage(), e);
        }
      }

      // Operation still in progress, wait before polling again.
      // Backoff grows linearly with the attempt count, capped at 10s, plus up to 1s of jitter.
      String prefix = String.format("operation=%s", operation.getName());
      int sleep = Math.min(attempt, 10); // sleep 10s max per attempt
      LOG.info("{}: operation in progress (sleeping ~{}s)", prefix, sleep);

      try {
        Thread.sleep((long) (sleep * 1000L + Math.random() * 1000));
      } catch (InterruptedException e) {
        // Preserve the interrupt flag for callers before surfacing the failure.
        Thread.currentThread().interrupt();
        throw new DatabricksException("Current thread was interrupted", e);
      }
      attempt++;
    }

    String timeoutMessage =
        timeout.isPresent()
            ? String.format("Operation timed out after %s: %s", timeout.get(), statusMessage)
            : String.format("Operation timed out: %s", statusMessage);
    throw new TimeoutException(timeoutMessage);
  }

  /**
   * Get the operation name.
   *
   * @return the operation name
   */
  public String getName() {
    return operation.getName();
  }

  /**
   * Get the operation metadata.
   *
   * @return the operation metadata, or null if not available
   * @throws DatabricksException if the metadata cannot be deserialized
   */
  public EndpointOperationMetadata getMetadata() {
    if (operation.getMetadata() == null) {
      return null;
    }

    try {
      JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata());
      return objectMapper.treeToValue(metadataJson, EndpointOperationMetadata.class);
    } catch (JsonProcessingException e) {
      throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e);
    }
  }

  /**
   * Check if the operation is done. This method refreshes the operation state before checking.
   *
   * @return true if the operation is complete, false otherwise
   * @throws DatabricksException if the status check fails
   */
  public boolean isDone() {
    refreshOperation();
    return operation.getDone() != null && operation.getDone();
  }

  /** Refresh the operation state by polling the server. */
  private void refreshOperation() {
    operation = impl.getOperation(new GetOperationRequest().setName(operation.getName()));
  }
}

/**
 * Request to update a compute endpoint. Generated data holder; {@code name} is bound to the URL
 * path and {@code updateMask} to a query parameter, so only {@code endpoint} is serialized into
 * the body.
 */
@Generated
public class UpdateEndpointRequest {
  /**
   * The Endpoint to update.
   *
   * <p>The endpoint's `name` field is used to identify the endpoint to update. Format:
   * projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}
   */
  @JsonProperty("endpoint")
  private Endpoint endpoint;

  /**
   * The resource name of the endpoint. Format:
   * projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}
   */
  @JsonIgnore private String name;

  /** The list of fields to update. If unspecified, all fields will be updated when possible. */
  @JsonIgnore
  @QueryParam("update_mask")
  private FieldMask updateMask;

  public UpdateEndpointRequest setEndpoint(Endpoint endpoint) {
    this.endpoint = endpoint;
    return this;
  }

  public Endpoint getEndpoint() {
    return endpoint;
  }

  public UpdateEndpointRequest setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  public UpdateEndpointRequest setUpdateMask(FieldMask updateMask) {
    this.updateMask = updateMask;
    return this;
  }

  public FieldMask getUpdateMask() {
    return updateMask;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    UpdateEndpointRequest that = (UpdateEndpointRequest) o;
    return Objects.equals(endpoint, that.endpoint)
        && Objects.equals(name, that.name)
        && Objects.equals(updateMask, that.updateMask);
  }

  @Override
  public int hashCode() {
    return Objects.hash(endpoint, name, updateMask);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateEndpointRequest.class)
        .add("endpoint", endpoint)
        .add("name", name)
        .add("updateMask", updateMask)
        .toString();
  }
}
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateProjectOperation.java @@ -0,0 +1,162 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.utils.SerDeUtils; +import com.databricks.sdk.service.common.lro.LroOptions; +import com.databricks.sdk.support.Generated; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.time.Duration; +import java.util.Optional; +import java.util.concurrent.TimeoutException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Wrapper for interacting with a long-running updateProject operation. Provides methods to wait for + * completion, check status, cancel, and access metadata. + */ +@Generated +public class UpdateProjectOperation { + private static final Logger LOG = LoggerFactory.getLogger(UpdateProjectOperation.class); + + private final PostgresService impl; + private Operation operation; + private final ObjectMapper objectMapper; + + public UpdateProjectOperation(PostgresService impl, Operation operation) { + this.impl = impl; + this.operation = operation; + this.objectMapper = SerDeUtils.createMapper(); + } + + /** + * Wait for the operation to complete and return the resulting Project. Waits indefinitely if no + * timeout is specified. + * + * @return the created Project + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public Project waitForCompletion() throws TimeoutException { + return waitForCompletion(Optional.empty()); + } + + /** + * Wait for the operation to complete and return the resulting Project. 
+ * + * @param options the options for configuring the wait behavior, can be empty for defaults + * @return the created Project + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public Project waitForCompletion(Optional options) throws TimeoutException { + Optional timeout = options.flatMap(LroOptions::getTimeout); + long deadline = + timeout.isPresent() + ? System.currentTimeMillis() + timeout.get().toMillis() + : Long.MAX_VALUE; + String statusMessage = "polling operation..."; + int attempt = 1; + + while (System.currentTimeMillis() < deadline) { + // Refresh the operation state + refreshOperation(); + + if (operation.getDone() != null && operation.getDone()) { + // Operation completed, check for success or failure + if (operation.getError() != null) { + String errorMsg = "unknown error"; + if (operation.getError().getMessage() != null + && !operation.getError().getMessage().isEmpty()) { + errorMsg = operation.getError().getMessage(); + } + + if (operation.getError().getErrorCode() != null) { + errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg); + } + + throw new DatabricksException("Operation failed: " + errorMsg); + } + + // Operation completed successfully, unmarshal response + if (operation.getResponse() == null) { + throw new DatabricksException("Operation completed but no response available"); + } + + try { + JsonNode responseJson = objectMapper.valueToTree(operation.getResponse()); + return objectMapper.treeToValue(responseJson, Project.class); + } catch (JsonProcessingException e) { + throw new DatabricksException( + "Failed to unmarshal project response: " + e.getMessage(), e); + } + } + + // Operation still in progress, wait before polling again + String prefix = String.format("operation=%s", operation.getName()); + int sleep = Math.min(attempt, 10); // sleep 10s max per attempt + LOG.info("{}: operation in progress (sleeping 
~{}s)", prefix, sleep); + + try { + Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new DatabricksException("Current thread was interrupted", e); + } + attempt++; + } + + String timeoutMessage = + timeout.isPresent() + ? String.format("Operation timed out after %s: %s", timeout.get(), statusMessage) + : String.format("Operation timed out: %s", statusMessage); + throw new TimeoutException(timeoutMessage); + } + + /** + * Get the operation name. + * + * @return the operation name + */ + public String getName() { + return operation.getName(); + } + + /** + * Get the operation metadata. + * + * @return the operation metadata, or null if not available + * @throws DatabricksException if the metadata cannot be deserialized + */ + public ProjectOperationMetadata getMetadata() { + if (operation.getMetadata() == null) { + return null; + } + + try { + JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata()); + return objectMapper.treeToValue(metadataJson, ProjectOperationMetadata.class); + } catch (JsonProcessingException e) { + throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e); + } + } + + /** + * Check if the operation is done. This method refreshes the operation state before checking. + * + * @return true if the operation is complete, false otherwise + * @throws DatabricksException if the status check fails + */ + public boolean isDone() { + refreshOperation(); + return operation.getDone() != null && operation.getDone(); + } + + /** Refresh the operation state by polling the server. 
*/ + private void refreshOperation() { + operation = impl.getOperation(new GetOperationRequest().setName(operation.getName())); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateProjectRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateProjectRequest.java new file mode 100755 index 000000000..e4dcd01d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateProjectRequest.java @@ -0,0 +1,82 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.FieldMask; +import java.util.Objects; + +@Generated +public class UpdateProjectRequest { + /** The resource name of the project. Format: projects/{project_id} */ + @JsonIgnore private String name; + + /** + * The Project to update. + * + *

The project's `name` field is used to identify the project to update. Format: + * projects/{project_id} + */ + @JsonProperty("project") + private Project project; + + /** The list of fields to update. If unspecified, all fields will be updated when possible. */ + @JsonIgnore + @QueryParam("update_mask") + private FieldMask updateMask; + + public UpdateProjectRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateProjectRequest setProject(Project project) { + this.project = project; + return this; + } + + public Project getProject() { + return project; + } + + public UpdateProjectRequest setUpdateMask(FieldMask updateMask) { + this.updateMask = updateMask; + return this; + } + + public FieldMask getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateProjectRequest that = (UpdateProjectRequest) o; + return Objects.equals(name, that.name) + && Objects.equals(project, that.project) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(name, project, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateProjectRequest.class) + .add("name", name) + .add("project", project) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java index 65bb8dc20..7cf08efee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java @@ -72,6 +72,10 @@ public class CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule { @JsonProperty("endpoint_service") private String endpointService; + /** */ + @JsonProperty("error_message") + private String errorMessage; + /** * The ID of a network connectivity configuration, which is the parent resource of this private * endpoint rule object. @@ -185,6 +189,16 @@ public String getEndpointService() { return endpointService; } + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setErrorMessage( + String errorMessage) { + this.errorMessage = errorMessage; + return this; + } + + public String getErrorMessage() { + return errorMessage; + } + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setNetworkConnectivityConfigId(String networkConnectivityConfigId) { this.networkConnectivityConfigId = networkConnectivityConfigId; @@ -248,6 +262,7 @@ public boolean equals(Object o) { && Objects.equals(domainNames, that.domainNames) && Objects.equals(enabled, that.enabled) && Objects.equals(endpointService, that.endpointService) + && Objects.equals(errorMessage, that.errorMessage) && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) && Objects.equals(resourceNames, that.resourceNames) && Objects.equals(ruleId, that.ruleId) @@ -266,6 +281,7 @@ public int hashCode() { domainNames, enabled, endpointService, + errorMessage, networkConnectivityConfigId, resourceNames, ruleId, @@ -284,6 +300,7 @@ public String toString() { .add("domainNames", domainNames) .add("enabled", enabled) .add("endpointService", endpointService) + .add("errorMessage", errorMessage) .add("networkConnectivityConfigId", networkConnectivityConfigId) .add("resourceNames", resourceNames) .add("ruleId", ruleId) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java index c233b7892..e0017ae8d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java @@ -53,6 +53,10 @@ public class NccAzurePrivateEndpointRule { @JsonProperty("endpoint_name") private String endpointName; + /** */ + @JsonProperty("error_message") + private String errorMessage; + /** * Only used by private endpoints to Azure first-party services. * @@ -136,6 +140,15 @@ public String getEndpointName() { return endpointName; } + public NccAzurePrivateEndpointRule setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + return this; + } + + public String getErrorMessage() { + return errorMessage; + } + public NccAzurePrivateEndpointRule setGroupId(String groupId) { this.groupId = groupId; return this; @@ -193,6 +206,7 @@ public boolean equals(Object o) { && Objects.equals(deactivatedAt, that.deactivatedAt) && Objects.equals(domainNames, that.domainNames) && Objects.equals(endpointName, that.endpointName) + && Objects.equals(errorMessage, that.errorMessage) && Objects.equals(groupId, that.groupId) && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) && Objects.equals(resourceId, that.resourceId) @@ -209,6 +223,7 @@ public int hashCode() { deactivatedAt, domainNames, endpointName, + errorMessage, groupId, networkConnectivityConfigId, resourceId, @@ -225,6 +240,7 @@ public String toString() { .add("deactivatedAt", deactivatedAt) .add("domainNames", domainNames) .add("endpointName", endpointName) + .add("errorMessage", errorMessage) .add("groupId", groupId) .add("networkConnectivityConfigId", networkConnectivityConfigId) .add("resourceId", resourceId) 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java index ddd2de72a..2d8dd713b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java @@ -59,6 +59,8 @@ public enum TerminationReasonCode { CLUSTER_OPERATION_TIMEOUT, COMMUNICATION_LOST, CONTAINER_LAUNCH_FAILURE, + CONTROL_PLANE_CONNECTION_FAILURE, + CONTROL_PLANE_CONNECTION_FAILURE_DUE_TO_MISCONFIG, CONTROL_PLANE_REQUEST_FAILURE, CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG, DATABASE_CONNECTION_FAILURE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequest.java index f7172ad00..c1d571df8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequest.java @@ -10,10 +10,7 @@ @Generated public class PatchEndpointBudgetPolicyRequest { - /** - * The budget policy id to be applied (hima-sheth) TODO: remove this once we've migrated to usage - * policies - */ + /** The budget policy id to be applied */ @JsonProperty("budget_policy_id") private String budgetPolicyId;