diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 57c80b78c..f221dbb73 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -c0e83a0de9da6aaad4cd6924fb65b1496bfdedcb \ No newline at end of file +24e0a363326bc3a60f3dfa45cd3c125d5629c34f \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 31bcf3f71..5b48a10ca 100644 --- a/.gitattributes +++ b/.gitattributes @@ -20,6 +20,8 @@ /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentState.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentStatus.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpec.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpecExperimentPermission.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceJobSpec.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceJobSpecJobPermission.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceSecretSpec.java linguist-generated=true @@ -40,6 +42,8 @@ /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java linguist-generated=true 
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceDatabase.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceDatabaseDatabasePermission.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperiment.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperimentExperimentPermission.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpace.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpaceGenieSpacePermission.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java linguist-generated=true @@ -81,6 +85,8 @@ /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppUpdateRequest.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetCustomTemplateRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitRepository.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitSource.java linguist-generated=true 
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsResponse.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequest.java linguist-generated=true @@ -2082,7 +2088,9 @@ /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointOperationMetadata.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointPoolerMode.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSettings.java linguist-generated=true -/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatusState.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointType.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ErrorCode.java linguist-generated=true 
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetBranchRequest.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 09184f38a..9fb0cbc5a 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -13,3 +13,18 @@ ### Internal Changes ### API Changes +* Add `gitRepository` field for `com.databricks.sdk.service.apps.App`. +* Add `gitSource` field for `com.databricks.sdk.service.apps.AppDeployment`. +* Add `experimentSpec` field for `com.databricks.sdk.service.apps.AppManifestAppResourceSpec`. +* Add `experiment` field for `com.databricks.sdk.service.apps.AppResource`. +* Add `gitRepository` field for `com.databricks.sdk.service.apps.AppUpdate`. +* Add `excludedTableFullNames` field for `com.databricks.sdk.service.dataquality.AnomalyDetectionConfig`. +* Add `groupName` field for `com.databricks.sdk.service.jobs.JobRunAs`. +* Add `rowFilter` field for `com.databricks.sdk.service.pipelines.TableSpecificConfig`. +* Add `spec` and `status` fields for `com.databricks.sdk.service.postgres.Endpoint`. +* Add `excludedTableFullNames` field for `com.databricks.sdk.service.qualitymonitorv2.AnomalyDetectionConfig`. +* Add `EXECUTE` and `USE_CONNECTION` enum values for `com.databricks.sdk.service.apps.AppManifestAppResourceUcSecurableSpecUcSecurablePermission`. +* Add `FUNCTION` and `CONNECTION` enum values for `com.databricks.sdk.service.apps.AppManifestAppResourceUcSecurableSpecUcSecurableType`. +* Add `SELECT`, `EXECUTE` and `USE_CONNECTION` enum values for `com.databricks.sdk.service.apps.AppResourceUcSecurableUcSecurablePermission`. +* Add `TABLE`, `FUNCTION` and `CONNECTION` enum values for `com.databricks.sdk.service.apps.AppResourceUcSecurableUcSecurableType`. 
+* [Breaking] Remove `autoscalingLimitMaxCu`, `autoscalingLimitMinCu`, `currentState`, `disabled`, `effectiveAutoscalingLimitMaxCu`, `effectiveAutoscalingLimitMinCu`, `effectiveDisabled`, `effectivePoolerMode`, `effectiveSettings`, `effectiveSuspendTimeoutDuration`, `endpointType`, `host`, `lastActiveTime`, `pendingState`, `poolerMode`, `settings`, `startTime`, `suspendTime` and `suspendTimeoutDuration` fields for `com.databricks.sdk.service.postgres.Endpoint`. \ No newline at end of file diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java index d7ef3d38a..d403abc7f 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java @@ -64,6 +64,13 @@ public class App { @JsonProperty("effective_user_api_scopes") private Collection effectiveUserApiScopes; + /** + * Git repository configuration for app deployments. When specified, deployments can reference + * code from this repository by providing only the git reference (branch, tag, or commit). + */ + @JsonProperty("git_repository") + private GitRepository gitRepository; + /** The unique identifier of the app. 
*/ @JsonProperty("id") private String id; @@ -234,6 +241,15 @@ public Collection getEffectiveUserApiScopes() { return effectiveUserApiScopes; } + public App setGitRepository(GitRepository gitRepository) { + this.gitRepository = gitRepository; + return this; + } + + public GitRepository getGitRepository() { + return gitRepository; + } + public App setId(String id) { this.id = id; return this; @@ -377,6 +393,7 @@ public boolean equals(Object o) { && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId) && Objects.equals(effectiveUserApiScopes, that.effectiveUserApiScopes) + && Objects.equals(gitRepository, that.gitRepository) && Objects.equals(id, that.id) && Objects.equals(name, that.name) && Objects.equals(oauth2AppClientId, that.oauth2AppClientId) @@ -408,6 +425,7 @@ public int hashCode() { effectiveBudgetPolicyId, effectiveUsagePolicyId, effectiveUserApiScopes, + gitRepository, id, name, oauth2AppClientId, @@ -439,6 +457,7 @@ public String toString() { .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) .add("effectiveUsagePolicyId", effectiveUsagePolicyId) .add("effectiveUserApiScopes", effectiveUserApiScopes) + .add("gitRepository", gitRepository) .add("id", id) .add("name", name) .add("oauth2AppClientId", oauth2AppClientId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java old mode 100755 new mode 100644 index 0961135b1..f127a9538 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java @@ -25,6 +25,10 @@ public class AppDeployment { @JsonProperty("deployment_id") private String deploymentId; + /** Git repository to use as the source for the app deployment. 
*/ + @JsonProperty("git_source") + private GitSource gitSource; + /** The mode of which the deployment will manage the source code. */ @JsonProperty("mode") private AppDeploymentMode mode; @@ -83,6 +87,15 @@ public String getDeploymentId() { return deploymentId; } + public AppDeployment setGitSource(GitSource gitSource) { + this.gitSource = gitSource; + return this; + } + + public GitSource getGitSource() { + return gitSource; + } + public AppDeployment setMode(AppDeploymentMode mode) { this.mode = mode; return this; @@ -128,6 +141,7 @@ public boolean equals(Object o) { && Objects.equals(creator, that.creator) && Objects.equals(deploymentArtifacts, that.deploymentArtifacts) && Objects.equals(deploymentId, that.deploymentId) + && Objects.equals(gitSource, that.gitSource) && Objects.equals(mode, that.mode) && Objects.equals(sourceCodePath, that.sourceCodePath) && Objects.equals(status, that.status) @@ -141,6 +155,7 @@ public int hashCode() { creator, deploymentArtifacts, deploymentId, + gitSource, mode, sourceCodePath, status, @@ -154,6 +169,7 @@ public String toString() { .add("creator", creator) .add("deploymentArtifacts", deploymentArtifacts) .add("deploymentId", deploymentId) + .add("gitSource", gitSource) .add("mode", mode) .add("sourceCodePath", sourceCodePath) .add("status", status) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpec.java new file mode 100644 index 000000000..2df07f818 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpec.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppManifestAppResourceExperimentSpec { + /** */ + @JsonProperty("permission") + private AppManifestAppResourceExperimentSpecExperimentPermission permission; + + public AppManifestAppResourceExperimentSpec setPermission( + AppManifestAppResourceExperimentSpecExperimentPermission permission) { + this.permission = permission; + return this; + } + + public AppManifestAppResourceExperimentSpecExperimentPermission getPermission() { + return permission; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppManifestAppResourceExperimentSpec that = (AppManifestAppResourceExperimentSpec) o; + return Objects.equals(permission, that.permission); + } + + @Override + public int hashCode() { + return Objects.hash(permission); + } + + @Override + public String toString() { + return new ToStringer(AppManifestAppResourceExperimentSpec.class) + .add("permission", permission) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpecExperimentPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpecExperimentPermission.java new file mode 100644 index 000000000..27db3de37 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpecExperimentPermission.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppManifestAppResourceExperimentSpecExperimentPermission { + CAN_EDIT, + CAN_MANAGE, + CAN_READ, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceSpec.java old mode 100755 new mode 100644 index f4487f6e5..70c87f518 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceSpec.java @@ -17,6 +17,10 @@ public class AppManifestAppResourceSpec { @JsonProperty("description") private String description; + /** */ + @JsonProperty("experiment_spec") + private AppManifestAppResourceExperimentSpec experimentSpec; + /** */ @JsonProperty("job_spec") private AppManifestAppResourceJobSpec jobSpec; @@ -50,6 +54,16 @@ public String getDescription() { return description; } + public AppManifestAppResourceSpec setExperimentSpec( + AppManifestAppResourceExperimentSpec experimentSpec) { + this.experimentSpec = experimentSpec; + return this; + } + + public AppManifestAppResourceExperimentSpec getExperimentSpec() { + return experimentSpec; + } + public AppManifestAppResourceSpec setJobSpec(AppManifestAppResourceJobSpec jobSpec) { this.jobSpec = jobSpec; return this; @@ -113,6 +127,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; AppManifestAppResourceSpec that = (AppManifestAppResourceSpec) o; return Objects.equals(description, that.description) + && Objects.equals(experimentSpec, that.experimentSpec) && Objects.equals(jobSpec, that.jobSpec) && Objects.equals(name, that.name) && Objects.equals(secretSpec, that.secretSpec) @@ -125,6 +140,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( description, + 
experimentSpec, jobSpec, name, secretSpec, @@ -137,6 +153,7 @@ public int hashCode() { public String toString() { return new ToStringer(AppManifestAppResourceSpec.class) .add("description", description) + .add("experimentSpec", experimentSpec) .add("jobSpec", jobSpec) .add("name", name) .add("secretSpec", secretSpec) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurablePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurablePermission.java old mode 100755 new mode 100644 index 1a614264f..a5c0749fa --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurablePermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurablePermission.java @@ -6,8 +6,10 @@ @Generated public enum AppManifestAppResourceUcSecurableSpecUcSecurablePermission { + EXECUTE, MANAGE, READ_VOLUME, SELECT, + USE_CONNECTION, WRITE_VOLUME, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurableType.java old mode 100755 new mode 100644 index 45cccb2d5..d8d452c05 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurableType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurableType.java @@ -6,6 +6,8 @@ @Generated public enum AppManifestAppResourceUcSecurableSpecUcSecurableType { + CONNECTION, + FUNCTION, TABLE, VOLUME, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java 
old mode 100755 new mode 100644 index 2761c1651..e46cb6e8e --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java @@ -17,6 +17,10 @@ public class AppResource { @JsonProperty("description") private String description; + /** */ + @JsonProperty("experiment") + private AppResourceExperiment experiment; + /** */ @JsonProperty("genie_space") private AppResourceGenieSpace genieSpace; @@ -63,6 +67,15 @@ public String getDescription() { return description; } + public AppResource setExperiment(AppResourceExperiment experiment) { + this.experiment = experiment; + return this; + } + + public AppResourceExperiment getExperiment() { + return experiment; + } + public AppResource setGenieSpace(AppResourceGenieSpace genieSpace) { this.genieSpace = genieSpace; return this; @@ -133,6 +146,7 @@ public boolean equals(Object o) { AppResource that = (AppResource) o; return Objects.equals(database, that.database) && Objects.equals(description, that.description) + && Objects.equals(experiment, that.experiment) && Objects.equals(genieSpace, that.genieSpace) && Objects.equals(job, that.job) && Objects.equals(name, that.name) @@ -147,6 +161,7 @@ public int hashCode() { return Objects.hash( database, description, + experiment, genieSpace, job, name, @@ -161,6 +176,7 @@ public String toString() { return new ToStringer(AppResource.class) .add("database", database) .add("description", description) + .add("experiment", experiment) .add("genieSpace", genieSpace) .add("job", job) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperiment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperiment.java new file mode 100644 index 000000000..6f194ec5c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperiment.java @@ -0,0 +1,59 @@ +// Code 
generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppResourceExperiment { + /** */ + @JsonProperty("experiment_id") + private String experimentId; + + /** */ + @JsonProperty("permission") + private AppResourceExperimentExperimentPermission permission; + + public AppResourceExperiment setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public AppResourceExperiment setPermission(AppResourceExperimentExperimentPermission permission) { + this.permission = permission; + return this; + } + + public AppResourceExperimentExperimentPermission getPermission() { + return permission; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceExperiment that = (AppResourceExperiment) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(permission, that.permission); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, permission); + } + + @Override + public String toString() { + return new ToStringer(AppResourceExperiment.class) + .add("experimentId", experimentId) + .add("permission", permission) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperimentExperimentPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperimentExperimentPermission.java new file mode 100644 index 000000000..db12ec2b0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperimentExperimentPermission.java @@ -0,0 +1,12 @@ +// 
Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppResourceExperimentExperimentPermission { + CAN_EDIT, + CAN_MANAGE, + CAN_READ, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java old mode 100755 new mode 100644 index 551de2d21..6ed176b3c --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java @@ -6,6 +6,9 @@ @Generated public enum AppResourceUcSecurableUcSecurablePermission { + EXECUTE, READ_VOLUME, + SELECT, + USE_CONNECTION, WRITE_VOLUME, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java old mode 100755 new mode 100644 index b05b5435e..c97224882 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java @@ -6,5 +6,8 @@ @Generated public enum AppResourceUcSecurableUcSecurableType { + CONNECTION, + FUNCTION, + TABLE, VOLUME, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java old mode 100755 new mode 100644 index b34c390e7..ff4970a5a --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java @@ -22,6 +22,10 @@ public class AppUpdate { @JsonProperty("description") private String description; + /** */ + @JsonProperty("git_repository") + private GitRepository gitRepository; + /** */ @JsonProperty("resources") private Collection resources; @@ -65,6 +69,15 @@ public String getDescription() { return description; } + public AppUpdate setGitRepository(GitRepository gitRepository) { + this.gitRepository = gitRepository; + return this; + } + + public GitRepository getGitRepository() { + return gitRepository; + } + public AppUpdate setResources(Collection resources) { this.resources = resources; return this; @@ -109,6 +122,7 @@ public boolean equals(Object o) { return Objects.equals(budgetPolicyId, that.budgetPolicyId) && Objects.equals(computeSize, that.computeSize) && Objects.equals(description, that.description) + && Objects.equals(gitRepository, that.gitRepository) && Objects.equals(resources, that.resources) && Objects.equals(status, that.status) && Objects.equals(usagePolicyId, that.usagePolicyId) @@ -118,7 +132,14 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - budgetPolicyId, computeSize, description, resources, status, usagePolicyId, userApiScopes); + budgetPolicyId, + computeSize, + description, + gitRepository, + resources, + status, + usagePolicyId, + userApiScopes); } @Override @@ -127,6 +148,7 @@ public String toString() { .add("budgetPolicyId", budgetPolicyId) .add("computeSize", computeSize) .add("description", description) + .add("gitRepository", gitRepository) .add("resources", resources) .add("status", status) .add("usagePolicyId", usagePolicyId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitRepository.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitRepository.java new file mode 100644 index 000000000..bd523a790 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitRepository.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Git repository configuration specifying the location of the repository. */ +@Generated +public class GitRepository { + /** + * Git provider. Case insensitive. Supported values: gitHub, gitHubEnterprise, bitbucketCloud, + * bitbucketServer, azureDevOpsServices, gitLab, gitLabEnterpriseEdition, awsCodeCommit. + */ + @JsonProperty("provider") + private String provider; + + /** URL of the Git repository. */ + @JsonProperty("url") + private String url; + + public GitRepository setProvider(String provider) { + this.provider = provider; + return this; + } + + public String getProvider() { + return provider; + } + + public GitRepository setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GitRepository that = (GitRepository) o; + return Objects.equals(provider, that.provider) && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(provider, url); + } + + @Override + public String toString() { + return new ToStringer(GitRepository.class).add("provider", provider).add("url", url).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitSource.java new file mode 100644 index 000000000..6b9728c1b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitSource.java @@ -0,0 +1,128 @@ 
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Complete git source specification including repository location and reference. */ +@Generated +public class GitSource { + /** Git branch to checkout. */ + @JsonProperty("branch") + private String branch; + + /** Git commit SHA to checkout. */ + @JsonProperty("commit") + private String commit; + + /** Git repository configuration. Populated from the app's git_repository configuration. */ + @JsonProperty("git_repository") + private GitRepository gitRepository; + + /** + * The resolved commit SHA that was actually used for the deployment. This is populated by the + * system after resolving the reference (branch, tag, or commit). If commit is specified directly, + * this will match commit. If a branch or tag is specified, this contains the commit SHA that the + * branch or tag pointed to at deployment time. + */ + @JsonProperty("resolved_commit") + private String resolvedCommit; + + /** + * Relative path to the app source code within the Git repository. If not specified, the root of + * the repository is used. + */ + @JsonProperty("source_code_path") + private String sourceCodePath; + + /** Git tag to checkout. 
*/ + @JsonProperty("tag") + private String tag; + + public GitSource setBranch(String branch) { + this.branch = branch; + return this; + } + + public String getBranch() { + return branch; + } + + public GitSource setCommit(String commit) { + this.commit = commit; + return this; + } + + public String getCommit() { + return commit; + } + + public GitSource setGitRepository(GitRepository gitRepository) { + this.gitRepository = gitRepository; + return this; + } + + public GitRepository getGitRepository() { + return gitRepository; + } + + public GitSource setResolvedCommit(String resolvedCommit) { + this.resolvedCommit = resolvedCommit; + return this; + } + + public String getResolvedCommit() { + return resolvedCommit; + } + + public GitSource setSourceCodePath(String sourceCodePath) { + this.sourceCodePath = sourceCodePath; + return this; + } + + public String getSourceCodePath() { + return sourceCodePath; + } + + public GitSource setTag(String tag) { + this.tag = tag; + return this; + } + + public String getTag() { + return tag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GitSource that = (GitSource) o; + return Objects.equals(branch, that.branch) + && Objects.equals(commit, that.commit) + && Objects.equals(gitRepository, that.gitRepository) + && Objects.equals(resolvedCommit, that.resolvedCommit) + && Objects.equals(sourceCodePath, that.sourceCodePath) + && Objects.equals(tag, that.tag); + } + + @Override + public int hashCode() { + return Objects.hash(branch, commit, gitRepository, resolvedCommit, sourceCodePath, tag); + } + + @Override + public String toString() { + return new ToStringer(GitSource.class) + .add("branch", branch) + .add("commit", commit) + .add("gitRepository", gitRepository) + .add("resolvedCommit", resolvedCommit) + .add("sourceCodePath", sourceCodePath) + .add("tag", tag) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java old mode 100755 new mode 100644 index 04137ed3e..44c7b1bc9 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java @@ -8,7 +8,7 @@ import java.util.Map; import java.util.Objects; -/** Next ID: 43 */ +/** Next ID: 44 */ @Generated public class SchemaInfo { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java old mode 100755 new mode 100644 index 182c3d0f0..cbe5577c2 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java @@ -160,7 +160,8 @@ public class TableInfo { * View dependencies (when table_type == **VIEW** or **MATERIALIZED_VIEW**, **STREAMING_TABLE**) - * when DependencyList is None, the dependency is not provided; - when DependencyList is an empty * list, the dependency is provided but is empty; - when DependencyList is not an empty list, - * dependencies are provided and recorded. + * dependencies are provided and recorded. Note: this field is not set in the output of the + * __listTables__ API. 
*/ @JsonProperty("view_dependencies") private DependencyList viewDependencies; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java old mode 100755 new mode 100644 index ac41bdf5a..5db6a605a --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java @@ -117,6 +117,9 @@ public Iterable list(String catalogName, String schemaName) { * privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is * no guarantee of a specific ordering of the elements in the array. * + *

NOTE: **view_dependencies** and **table_constraints** are not returned by ListTables + * queries. + * *

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated * calls will be deprecated soon. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java old mode 100755 new mode 100644 index 34f2b78ed..1efde490f --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java @@ -79,6 +79,9 @@ public interface TablesService { * privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is * no guarantee of a specific ordering of the elements in the array. * + *

NOTE: **view_dependencies** and **table_constraints** are not returned by ListTables + * queries. + * *

NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated * calls will be deprecated soon. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java old mode 100755 new mode 100644 index 6fd6e421c..3230194c5 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java @@ -4,26 +4,44 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; /** Anomaly Detection Configurations. */ @Generated public class AnomalyDetectionConfig { + /** List of fully qualified table names to exclude from anomaly detection. 
*/ + @JsonProperty("excluded_table_full_names") + private Collection excludedTableFullNames; + + public AnomalyDetectionConfig setExcludedTableFullNames( + Collection excludedTableFullNames) { + this.excludedTableFullNames = excludedTableFullNames; + return this; + } + + public Collection getExcludedTableFullNames() { + return excludedTableFullNames; + } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - return true; + AnomalyDetectionConfig that = (AnomalyDetectionConfig) o; + return Objects.equals(excludedTableFullNames, that.excludedTableFullNames); } @Override public int hashCode() { - return Objects.hash(); + return Objects.hash(excludedTableFullNames); } @Override public String toString() { - return new ToStringer(AnomalyDetectionConfig.class).toString(); + return new ToStringer(AnomalyDetectionConfig.class) + .add("excludedTableFullNames", excludedTableFullNames) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java old mode 100755 new mode 100644 index 53dc83891..dcbf39048 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java @@ -16,6 +16,13 @@ */ @Generated public class JobRunAs { + /** + * Group name of an account group assigned to the workspace. Setting this field requires being a + * member of the group. + */ + @JsonProperty("group_name") + private String groupName; + /** * Application ID of an active service principal. Setting this field requires the * `servicePrincipal/user` role. 
@@ -30,6 +37,15 @@ public class JobRunAs { @JsonProperty("user_name") private String userName; + public JobRunAs setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + public JobRunAs setServicePrincipalName(String servicePrincipalName) { this.servicePrincipalName = servicePrincipalName; return this; @@ -53,18 +69,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; JobRunAs that = (JobRunAs) o; - return Objects.equals(servicePrincipalName, that.servicePrincipalName) + return Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) && Objects.equals(userName, that.userName); } @Override public int hashCode() { - return Objects.hash(servicePrincipalName, userName); + return Objects.hash(groupName, servicePrincipalName, userName); } @Override public String toString() { return new ToStringer(JobRunAs.class) + .add("groupName", groupName) .add("servicePrincipalName", servicePrincipalName) .add("userName", userName) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java old mode 100755 new mode 100644 index 4e8dc6013..5a1f380e3 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java @@ -37,6 +37,13 @@ public class TableSpecificConfig { private IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig queryBasedConnectorConfig; + /** + * (Optional, Immutable) The row filter condition to be applied to the table. It must not contain + * the WHERE keyword, only the actual filter condition. It must be in DBSQL format. 
+ */ + @JsonProperty("row_filter") + private String rowFilter; + /** * If true, formula fields defined in the table are included in the ingestion. This setting is * only valid for the Salesforce connector @@ -98,6 +105,15 @@ public TableSpecificConfig setQueryBasedConnectorConfig( return queryBasedConnectorConfig; } + public TableSpecificConfig setRowFilter(String rowFilter) { + this.rowFilter = rowFilter; + return this; + } + + public String getRowFilter() { + return rowFilter; + } + public TableSpecificConfig setSalesforceIncludeFormulaFields( Boolean salesforceIncludeFormulaFields) { this.salesforceIncludeFormulaFields = salesforceIncludeFormulaFields; @@ -145,6 +161,7 @@ public boolean equals(Object o) { && Objects.equals(includeColumns, that.includeColumns) && Objects.equals(primaryKeys, that.primaryKeys) && Objects.equals(queryBasedConnectorConfig, that.queryBasedConnectorConfig) + && Objects.equals(rowFilter, that.rowFilter) && Objects.equals(salesforceIncludeFormulaFields, that.salesforceIncludeFormulaFields) && Objects.equals(scdType, that.scdType) && Objects.equals(sequenceBy, that.sequenceBy) @@ -158,6 +175,7 @@ public int hashCode() { includeColumns, primaryKeys, queryBasedConnectorConfig, + rowFilter, salesforceIncludeFormulaFields, scdType, sequenceBy, @@ -171,6 +189,7 @@ public String toString() { .add("includeColumns", includeColumns) .add("primaryKeys", primaryKeys) .add("queryBasedConnectorConfig", queryBasedConnectorConfig) + .add("rowFilter", rowFilter) .add("salesforceIncludeFormulaFields", salesforceIncludeFormulaFields) .add("scdType", scdType) .add("sequenceBy", sequenceBy) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java index b8adfb44f..7fded5cea 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java @@ -5,79 +5,15 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.protobuf.Duration; import com.google.protobuf.Timestamp; import java.util.Objects; @Generated public class Endpoint { - /** The maximum number of Compute Units. */ - @JsonProperty("autoscaling_limit_max_cu") - private Double autoscalingLimitMaxCu; - - /** The minimum number of Compute Units. */ - @JsonProperty("autoscaling_limit_min_cu") - private Double autoscalingLimitMinCu; - /** A timestamp indicating when the compute endpoint was created. */ @JsonProperty("create_time") private Timestamp createTime; - /** */ - @JsonProperty("current_state") - private EndpointState currentState; - - /** - * Whether to restrict connections to the compute endpoint. Enabling this option schedules a - * suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or - * console action. - */ - @JsonProperty("disabled") - private Boolean disabled; - - /** The maximum number of Compute Units. */ - @JsonProperty("effective_autoscaling_limit_max_cu") - private Double effectiveAutoscalingLimitMaxCu; - - /** The minimum number of Compute Units. */ - @JsonProperty("effective_autoscaling_limit_min_cu") - private Double effectiveAutoscalingLimitMinCu; - - /** - * Whether to restrict connections to the compute endpoint. Enabling this option schedules a - * suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or - * console action. 
- */ - @JsonProperty("effective_disabled") - private Boolean effectiveDisabled; - - /** */ - @JsonProperty("effective_pooler_mode") - private EndpointPoolerMode effectivePoolerMode; - - /** */ - @JsonProperty("effective_settings") - private EndpointSettings effectiveSettings; - - /** Duration of inactivity after which the compute endpoint is automatically suspended. */ - @JsonProperty("effective_suspend_timeout_duration") - private Duration effectiveSuspendTimeoutDuration; - - /** The endpoint type. There could be only one READ_WRITE endpoint per branch. */ - @JsonProperty("endpoint_type") - private EndpointType endpointType; - - /** - * The hostname of the compute endpoint. This is the hostname specified when connecting to a - * database. - */ - @JsonProperty("host") - private String host; - - /** A timestamp indicating when the compute endpoint was last active. */ - @JsonProperty("last_active_time") - private Timestamp lastActiveTime; - /** * The resource name of the endpoint. Format: * projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} @@ -89,29 +25,13 @@ public class Endpoint { @JsonProperty("parent") private String parent; - /** */ - @JsonProperty("pending_state") - private EndpointState pendingState; - - /** */ - @JsonProperty("pooler_mode") - private EndpointPoolerMode poolerMode; + /** The desired state of an Endpoint. */ + @JsonProperty("spec") + private EndpointSpec spec; /** */ - @JsonProperty("settings") - private EndpointSettings settings; - - /** A timestamp indicating when the compute endpoint was last started. */ - @JsonProperty("start_time") - private Timestamp startTime; - - /** A timestamp indicating when the compute endpoint was last suspended. */ - @JsonProperty("suspend_time") - private Timestamp suspendTime; - - /** Duration of inactivity after which the compute endpoint is automatically suspended. 
*/ - @JsonProperty("suspend_timeout_duration") - private Duration suspendTimeoutDuration; + @JsonProperty("status") + private EndpointStatus status; /** System generated unique ID for the endpoint. */ @JsonProperty("uid") @@ -121,24 +41,6 @@ public class Endpoint { @JsonProperty("update_time") private Timestamp updateTime; - public Endpoint setAutoscalingLimitMaxCu(Double autoscalingLimitMaxCu) { - this.autoscalingLimitMaxCu = autoscalingLimitMaxCu; - return this; - } - - public Double getAutoscalingLimitMaxCu() { - return autoscalingLimitMaxCu; - } - - public Endpoint setAutoscalingLimitMinCu(Double autoscalingLimitMinCu) { - this.autoscalingLimitMinCu = autoscalingLimitMinCu; - return this; - } - - public Double getAutoscalingLimitMinCu() { - return autoscalingLimitMinCu; - } - public Endpoint setCreateTime(Timestamp createTime) { this.createTime = createTime; return this; @@ -148,105 +50,6 @@ public Timestamp getCreateTime() { return createTime; } - public Endpoint setCurrentState(EndpointState currentState) { - this.currentState = currentState; - return this; - } - - public EndpointState getCurrentState() { - return currentState; - } - - public Endpoint setDisabled(Boolean disabled) { - this.disabled = disabled; - return this; - } - - public Boolean getDisabled() { - return disabled; - } - - public Endpoint setEffectiveAutoscalingLimitMaxCu(Double effectiveAutoscalingLimitMaxCu) { - this.effectiveAutoscalingLimitMaxCu = effectiveAutoscalingLimitMaxCu; - return this; - } - - public Double getEffectiveAutoscalingLimitMaxCu() { - return effectiveAutoscalingLimitMaxCu; - } - - public Endpoint setEffectiveAutoscalingLimitMinCu(Double effectiveAutoscalingLimitMinCu) { - this.effectiveAutoscalingLimitMinCu = effectiveAutoscalingLimitMinCu; - return this; - } - - public Double getEffectiveAutoscalingLimitMinCu() { - return effectiveAutoscalingLimitMinCu; - } - - public Endpoint setEffectiveDisabled(Boolean effectiveDisabled) { - this.effectiveDisabled = 
effectiveDisabled; - return this; - } - - public Boolean getEffectiveDisabled() { - return effectiveDisabled; - } - - public Endpoint setEffectivePoolerMode(EndpointPoolerMode effectivePoolerMode) { - this.effectivePoolerMode = effectivePoolerMode; - return this; - } - - public EndpointPoolerMode getEffectivePoolerMode() { - return effectivePoolerMode; - } - - public Endpoint setEffectiveSettings(EndpointSettings effectiveSettings) { - this.effectiveSettings = effectiveSettings; - return this; - } - - public EndpointSettings getEffectiveSettings() { - return effectiveSettings; - } - - public Endpoint setEffectiveSuspendTimeoutDuration(Duration effectiveSuspendTimeoutDuration) { - this.effectiveSuspendTimeoutDuration = effectiveSuspendTimeoutDuration; - return this; - } - - public Duration getEffectiveSuspendTimeoutDuration() { - return effectiveSuspendTimeoutDuration; - } - - public Endpoint setEndpointType(EndpointType endpointType) { - this.endpointType = endpointType; - return this; - } - - public EndpointType getEndpointType() { - return endpointType; - } - - public Endpoint setHost(String host) { - this.host = host; - return this; - } - - public String getHost() { - return host; - } - - public Endpoint setLastActiveTime(Timestamp lastActiveTime) { - this.lastActiveTime = lastActiveTime; - return this; - } - - public Timestamp getLastActiveTime() { - return lastActiveTime; - } - public Endpoint setName(String name) { this.name = name; return this; @@ -265,58 +68,22 @@ public String getParent() { return parent; } - public Endpoint setPendingState(EndpointState pendingState) { - this.pendingState = pendingState; - return this; - } - - public EndpointState getPendingState() { - return pendingState; - } - - public Endpoint setPoolerMode(EndpointPoolerMode poolerMode) { - this.poolerMode = poolerMode; - return this; - } - - public EndpointPoolerMode getPoolerMode() { - return poolerMode; - } - - public Endpoint setSettings(EndpointSettings settings) { - 
this.settings = settings; - return this; - } - - public EndpointSettings getSettings() { - return settings; - } - - public Endpoint setStartTime(Timestamp startTime) { - this.startTime = startTime; - return this; - } - - public Timestamp getStartTime() { - return startTime; - } - - public Endpoint setSuspendTime(Timestamp suspendTime) { - this.suspendTime = suspendTime; + public Endpoint setSpec(EndpointSpec spec) { + this.spec = spec; return this; } - public Timestamp getSuspendTime() { - return suspendTime; + public EndpointSpec getSpec() { + return spec; } - public Endpoint setSuspendTimeoutDuration(Duration suspendTimeoutDuration) { - this.suspendTimeoutDuration = suspendTimeoutDuration; + public Endpoint setStatus(EndpointStatus status) { + this.status = status; return this; } - public Duration getSuspendTimeoutDuration() { - return suspendTimeoutDuration; + public EndpointStatus getStatus() { + return status; } public Endpoint setUid(String uid) { @@ -342,86 +109,28 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Endpoint that = (Endpoint) o; - return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu) - && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu) - && Objects.equals(createTime, that.createTime) - && Objects.equals(currentState, that.currentState) - && Objects.equals(disabled, that.disabled) - && Objects.equals(effectiveAutoscalingLimitMaxCu, that.effectiveAutoscalingLimitMaxCu) - && Objects.equals(effectiveAutoscalingLimitMinCu, that.effectiveAutoscalingLimitMinCu) - && Objects.equals(effectiveDisabled, that.effectiveDisabled) - && Objects.equals(effectivePoolerMode, that.effectivePoolerMode) - && Objects.equals(effectiveSettings, that.effectiveSettings) - && Objects.equals(effectiveSuspendTimeoutDuration, that.effectiveSuspendTimeoutDuration) - && Objects.equals(endpointType, that.endpointType) - && Objects.equals(host, that.host) - && 
Objects.equals(lastActiveTime, that.lastActiveTime) + return Objects.equals(createTime, that.createTime) && Objects.equals(name, that.name) && Objects.equals(parent, that.parent) - && Objects.equals(pendingState, that.pendingState) - && Objects.equals(poolerMode, that.poolerMode) - && Objects.equals(settings, that.settings) - && Objects.equals(startTime, that.startTime) - && Objects.equals(suspendTime, that.suspendTime) - && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration) + && Objects.equals(spec, that.spec) + && Objects.equals(status, that.status) && Objects.equals(uid, that.uid) && Objects.equals(updateTime, that.updateTime); } @Override public int hashCode() { - return Objects.hash( - autoscalingLimitMaxCu, - autoscalingLimitMinCu, - createTime, - currentState, - disabled, - effectiveAutoscalingLimitMaxCu, - effectiveAutoscalingLimitMinCu, - effectiveDisabled, - effectivePoolerMode, - effectiveSettings, - effectiveSuspendTimeoutDuration, - endpointType, - host, - lastActiveTime, - name, - parent, - pendingState, - poolerMode, - settings, - startTime, - suspendTime, - suspendTimeoutDuration, - uid, - updateTime); + return Objects.hash(createTime, name, parent, spec, status, uid, updateTime); } @Override public String toString() { return new ToStringer(Endpoint.class) - .add("autoscalingLimitMaxCu", autoscalingLimitMaxCu) - .add("autoscalingLimitMinCu", autoscalingLimitMinCu) .add("createTime", createTime) - .add("currentState", currentState) - .add("disabled", disabled) - .add("effectiveAutoscalingLimitMaxCu", effectiveAutoscalingLimitMaxCu) - .add("effectiveAutoscalingLimitMinCu", effectiveAutoscalingLimitMinCu) - .add("effectiveDisabled", effectiveDisabled) - .add("effectivePoolerMode", effectivePoolerMode) - .add("effectiveSettings", effectiveSettings) - .add("effectiveSuspendTimeoutDuration", effectiveSuspendTimeoutDuration) - .add("endpointType", endpointType) - .add("host", host) - .add("lastActiveTime", lastActiveTime) .add("name", 
name) .add("parent", parent) - .add("pendingState", pendingState) - .add("poolerMode", poolerMode) - .add("settings", settings) - .add("startTime", startTime) - .add("suspendTime", suspendTime) - .add("suspendTimeoutDuration", suspendTimeoutDuration) + .add("spec", spec) + .add("status", status) .add("uid", uid) .add("updateTime", updateTime) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java new file mode 100644 index 000000000..74fe3672b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java @@ -0,0 +1,146 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Duration; +import java.util.Objects; + +@Generated +public class EndpointSpec { + /** The maximum number of Compute Units. */ + @JsonProperty("autoscaling_limit_max_cu") + private Double autoscalingLimitMaxCu; + + /** The minimum number of Compute Units. */ + @JsonProperty("autoscaling_limit_min_cu") + private Double autoscalingLimitMinCu; + + /** + * Whether to restrict connections to the compute endpoint. Enabling this option schedules a + * suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or + * console action. + */ + @JsonProperty("disabled") + private Boolean disabled; + + /** The endpoint type. A branch can only have one READ_WRITE endpoint. 
*/ + @JsonProperty("endpoint_type") + private EndpointType endpointType; + + /** */ + @JsonProperty("pooler_mode") + private EndpointPoolerMode poolerMode; + + /** */ + @JsonProperty("settings") + private EndpointSettings settings; + + /** Duration of inactivity after which the compute endpoint is automatically suspended. */ + @JsonProperty("suspend_timeout_duration") + private Duration suspendTimeoutDuration; + + public EndpointSpec setAutoscalingLimitMaxCu(Double autoscalingLimitMaxCu) { + this.autoscalingLimitMaxCu = autoscalingLimitMaxCu; + return this; + } + + public Double getAutoscalingLimitMaxCu() { + return autoscalingLimitMaxCu; + } + + public EndpointSpec setAutoscalingLimitMinCu(Double autoscalingLimitMinCu) { + this.autoscalingLimitMinCu = autoscalingLimitMinCu; + return this; + } + + public Double getAutoscalingLimitMinCu() { + return autoscalingLimitMinCu; + } + + public EndpointSpec setDisabled(Boolean disabled) { + this.disabled = disabled; + return this; + } + + public Boolean getDisabled() { + return disabled; + } + + public EndpointSpec setEndpointType(EndpointType endpointType) { + this.endpointType = endpointType; + return this; + } + + public EndpointType getEndpointType() { + return endpointType; + } + + public EndpointSpec setPoolerMode(EndpointPoolerMode poolerMode) { + this.poolerMode = poolerMode; + return this; + } + + public EndpointPoolerMode getPoolerMode() { + return poolerMode; + } + + public EndpointSpec setSettings(EndpointSettings settings) { + this.settings = settings; + return this; + } + + public EndpointSettings getSettings() { + return settings; + } + + public EndpointSpec setSuspendTimeoutDuration(Duration suspendTimeoutDuration) { + this.suspendTimeoutDuration = suspendTimeoutDuration; + return this; + } + + public Duration getSuspendTimeoutDuration() { + return suspendTimeoutDuration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + EndpointSpec that = (EndpointSpec) o; + return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu) + && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu) + && Objects.equals(disabled, that.disabled) + && Objects.equals(endpointType, that.endpointType) + && Objects.equals(poolerMode, that.poolerMode) + && Objects.equals(settings, that.settings) + && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration); + } + + @Override + public int hashCode() { + return Objects.hash( + autoscalingLimitMaxCu, + autoscalingLimitMinCu, + disabled, + endpointType, + poolerMode, + settings, + suspendTimeoutDuration); + } + + @Override + public String toString() { + return new ToStringer(EndpointSpec.class) + .add("autoscalingLimitMaxCu", autoscalingLimitMaxCu) + .add("autoscalingLimitMinCu", autoscalingLimitMinCu) + .add("disabled", disabled) + .add("endpointType", endpointType) + .add("poolerMode", poolerMode) + .add("settings", settings) + .add("suspendTimeoutDuration", suspendTimeoutDuration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java new file mode 100644 index 000000000..827dde4e8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java @@ -0,0 +1,247 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Duration; +import com.google.protobuf.Timestamp; +import java.util.Objects; + +/** The current status of an Endpoint. */ +@Generated +public class EndpointStatus { + /** The maximum number of Compute Units. 
*/ + @JsonProperty("autoscaling_limit_max_cu") + private Double autoscalingLimitMaxCu; + + /** The minimum number of Compute Units. */ + @JsonProperty("autoscaling_limit_min_cu") + private Double autoscalingLimitMinCu; + + /** */ + @JsonProperty("current_state") + private EndpointStatusState currentState; + + /** + * Whether to restrict connections to the compute endpoint. Enabling this option schedules a + * suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or + * console action. + */ + @JsonProperty("disabled") + private Boolean disabled; + + /** The endpoint type. A branch can only have one READ_WRITE endpoint. */ + @JsonProperty("endpoint_type") + private EndpointType endpointType; + + /** + * The hostname of the compute endpoint. This is the hostname specified when connecting to a + * database. + */ + @JsonProperty("host") + private String host; + + /** A timestamp indicating when the compute endpoint was last active. */ + @JsonProperty("last_active_time") + private Timestamp lastActiveTime; + + /** */ + @JsonProperty("pending_state") + private EndpointStatusState pendingState; + + /** */ + @JsonProperty("pooler_mode") + private EndpointPoolerMode poolerMode; + + /** */ + @JsonProperty("settings") + private EndpointSettings settings; + + /** A timestamp indicating when the compute endpoint was last started. */ + @JsonProperty("start_time") + private Timestamp startTime; + + /** A timestamp indicating when the compute endpoint was last suspended. */ + @JsonProperty("suspend_time") + private Timestamp suspendTime; + + /** Duration of inactivity after which the compute endpoint is automatically suspended. 
*/ + @JsonProperty("suspend_timeout_duration") + private Duration suspendTimeoutDuration; + + public EndpointStatus setAutoscalingLimitMaxCu(Double autoscalingLimitMaxCu) { + this.autoscalingLimitMaxCu = autoscalingLimitMaxCu; + return this; + } + + public Double getAutoscalingLimitMaxCu() { + return autoscalingLimitMaxCu; + } + + public EndpointStatus setAutoscalingLimitMinCu(Double autoscalingLimitMinCu) { + this.autoscalingLimitMinCu = autoscalingLimitMinCu; + return this; + } + + public Double getAutoscalingLimitMinCu() { + return autoscalingLimitMinCu; + } + + public EndpointStatus setCurrentState(EndpointStatusState currentState) { + this.currentState = currentState; + return this; + } + + public EndpointStatusState getCurrentState() { + return currentState; + } + + public EndpointStatus setDisabled(Boolean disabled) { + this.disabled = disabled; + return this; + } + + public Boolean getDisabled() { + return disabled; + } + + public EndpointStatus setEndpointType(EndpointType endpointType) { + this.endpointType = endpointType; + return this; + } + + public EndpointType getEndpointType() { + return endpointType; + } + + public EndpointStatus setHost(String host) { + this.host = host; + return this; + } + + public String getHost() { + return host; + } + + public EndpointStatus setLastActiveTime(Timestamp lastActiveTime) { + this.lastActiveTime = lastActiveTime; + return this; + } + + public Timestamp getLastActiveTime() { + return lastActiveTime; + } + + public EndpointStatus setPendingState(EndpointStatusState pendingState) { + this.pendingState = pendingState; + return this; + } + + public EndpointStatusState getPendingState() { + return pendingState; + } + + public EndpointStatus setPoolerMode(EndpointPoolerMode poolerMode) { + this.poolerMode = poolerMode; + return this; + } + + public EndpointPoolerMode getPoolerMode() { + return poolerMode; + } + + public EndpointStatus setSettings(EndpointSettings settings) { + this.settings = settings; + return this; + 
} + + public EndpointSettings getSettings() { + return settings; + } + + public EndpointStatus setStartTime(Timestamp startTime) { + this.startTime = startTime; + return this; + } + + public Timestamp getStartTime() { + return startTime; + } + + public EndpointStatus setSuspendTime(Timestamp suspendTime) { + this.suspendTime = suspendTime; + return this; + } + + public Timestamp getSuspendTime() { + return suspendTime; + } + + public EndpointStatus setSuspendTimeoutDuration(Duration suspendTimeoutDuration) { + this.suspendTimeoutDuration = suspendTimeoutDuration; + return this; + } + + public Duration getSuspendTimeoutDuration() { + return suspendTimeoutDuration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointStatus that = (EndpointStatus) o; + return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu) + && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu) + && Objects.equals(currentState, that.currentState) + && Objects.equals(disabled, that.disabled) + && Objects.equals(endpointType, that.endpointType) + && Objects.equals(host, that.host) + && Objects.equals(lastActiveTime, that.lastActiveTime) + && Objects.equals(pendingState, that.pendingState) + && Objects.equals(poolerMode, that.poolerMode) + && Objects.equals(settings, that.settings) + && Objects.equals(startTime, that.startTime) + && Objects.equals(suspendTime, that.suspendTime) + && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration); + } + + @Override + public int hashCode() { + return Objects.hash( + autoscalingLimitMaxCu, + autoscalingLimitMinCu, + currentState, + disabled, + endpointType, + host, + lastActiveTime, + pendingState, + poolerMode, + settings, + startTime, + suspendTime, + suspendTimeoutDuration); + } + + @Override + public String toString() { + return new ToStringer(EndpointStatus.class) + .add("autoscalingLimitMaxCu", 
autoscalingLimitMaxCu) + .add("autoscalingLimitMinCu", autoscalingLimitMinCu) + .add("currentState", currentState) + .add("disabled", disabled) + .add("endpointType", endpointType) + .add("host", host) + .add("lastActiveTime", lastActiveTime) + .add("pendingState", pendingState) + .add("poolerMode", poolerMode) + .add("settings", settings) + .add("startTime", startTime) + .add("suspendTime", suspendTime) + .add("suspendTimeoutDuration", suspendTimeoutDuration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatusState.java similarity index 88% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatusState.java index b0d141670..57714e82b 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatusState.java @@ -6,7 +6,7 @@ /** The state of the compute endpoint. 
 */
 @Generated
-public enum EndpointState {
+public enum EndpointStatusState {
   ACTIVE,
   IDLE,
   INIT,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java
old mode 100755
new mode 100644
index 5b14da636..c3d49125e
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java
@@ -5,10 +5,15 @@
 import com.databricks.sdk.support.Generated;
 import com.databricks.sdk.support.ToStringer;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
 import java.util.Objects;
 
 @Generated
 public class AnomalyDetectionConfig {
+  /** List of fully qualified table names to exclude from anomaly detection. */
+  @JsonProperty("excluded_table_full_names")
+  private Collection<String> excludedTableFullNames;
+
   /** Run id of the last run of the workflow */
   @JsonProperty("last_run_id")
   private String lastRunId;
@@ -17,6 +22,16 @@ public class AnomalyDetectionConfig {
   @JsonProperty("latest_run_status")
   private AnomalyDetectionRunStatus latestRunStatus;
 
+  public AnomalyDetectionConfig setExcludedTableFullNames(
+      Collection<String> excludedTableFullNames) {
+    this.excludedTableFullNames = excludedTableFullNames;
+    return this;
+  }
+
+  public Collection<String> getExcludedTableFullNames() {
+    return excludedTableFullNames;
+  }
+
   public AnomalyDetectionConfig setLastRunId(String lastRunId) {
     this.lastRunId = lastRunId;
     return this;
@@ -40,18 +55,20 @@ public boolean equals(Object o) {
     if (this == o) return true;
     if (o == null || getClass() != o.getClass()) return false;
     AnomalyDetectionConfig that = (AnomalyDetectionConfig) o;
-    return Objects.equals(lastRunId, that.lastRunId)
+    return Objects.equals(excludedTableFullNames, that.excludedTableFullNames)
+        && Objects.equals(lastRunId, that.lastRunId)
         && Objects.equals(latestRunStatus, that.latestRunStatus);
   }
 
   @Override
   public int hashCode() {
-    return Objects.hash(lastRunId, latestRunStatus);
+    return Objects.hash(excludedTableFullNames, lastRunId, latestRunStatus);
   }
 
   @Override
   public String toString() {
     return new ToStringer(AnomalyDetectionConfig.class)
+        .add("excludedTableFullNames", excludedTableFullNames)
         .add("lastRunId", lastRunId)
         .add("latestRunStatus", latestRunStatus)
         .toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java
old mode 100755
new mode 100644
index f6e090ef5..071f11697
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java
@@ -58,10 +58,29 @@ public class SharedDataObject {
   private Collection<Partition> partitions;
 
   /**
-   * A user-provided new name for the data object within the share. If this new
-   * name is not provided, the object's original name will be used as the
-   * `shared_as` name. The `shared_as` name must be unique within a share. For
-   * tables, the new name must follow the format of `<schema>.<table>`.
+   * A user-provided alias name for table-like data objects within the share.
+   *
+   * Use this field for table-like objects (for example: TABLE, VIEW,
+   * MATERIALIZED_VIEW, STREAMING_TABLE, FOREIGN_TABLE). For non-table objects
+   * (for example: VOLUME, MODEL, NOTEBOOK_FILE, FUNCTION), use
+   * `string_shared_as` instead.
+   *
+   * Important: For non-table objects, this field must be omitted entirely.
+   *
+   * Format: Must be a 2-part name `<schema>.<table>` (e.g.,
+   * "sales_schema.orders_table") - Both schema and table names must contain
+   * only alphanumeric characters and underscores - No periods, spaces, forward
+   * slashes, or control characters are allowed within each part - Do not
+   * include the catalog name (use 2 parts, not 3)
+   *
+   * Behavior: - If not provided, the service automatically generates the alias
+   * as `<schema>.<table>` from the object's original name - If you don't want
+   * to specify this field, omit it entirely from the request (do not pass an
+   * empty string) - The `shared_as` name must be unique within the share
+   *
+   * Examples: - Valid: "analytics_schema.customer_view" - Invalid:
+   * "catalog.analytics_schema.customer_view" (3 parts not allowed) - Invalid:
+   * "analytics-schema.customer-view" (hyphens not allowed)
    */
   @JsonProperty("shared_as")
   private String sharedAs;
@@ -82,10 +101,30 @@ public class SharedDataObject {
   private SharedDataObjectStatus status;
 
   /**
-   * A user-provided new name for the shared object within the share. If this new name is not not
-   * provided, the object's original name will be used as the `string_shared_as` name. The
-   * `string_shared_as` name must be unique for objects of the same type within a Share. For
-   * notebooks, the new name should be the new notebook file name.
+   * A user-provided alias name for non-table data objects within the share.
+   *
+   * <p>Use this field for non-table objects (for example: VOLUME, MODEL, NOTEBOOK_FILE, FUNCTION).
+   * For table-like objects (for example: TABLE, VIEW, MATERIALIZED_VIEW, STREAMING_TABLE,
+   * FOREIGN_TABLE), use `shared_as` instead.
+   *
+   * <p>Important: For table-like objects, this field must be omitted entirely.
+   *
+   * <p>Format: - For VOLUME: Must be a 2-part name `<schema>.<volume>` (e.g.,
+   * "data_schema.ml_models") - For FUNCTION: Must be a 2-part name `<schema>.<function>`
+   * (e.g., "udf_schema.calculate_tax") - For MODEL: Must be a 2-part name
+   * `<schema>.<model>` (e.g., "models.prediction_model") - For NOTEBOOK_FILE: Should be
+   * the notebook file name (e.g., "analysis_notebook.py") - All names must contain only
+   * alphanumeric characters and underscores - No periods, spaces, forward slashes, or control
+   * characters are allowed within each part
+   *
+   * <p>Behavior: - If not provided, the service automatically generates the alias from the object's
+   * original name - If you don't want to specify this field, omit it entirely from the request (do
+   * not pass an empty string) - The `string_shared_as` name must be unique for objects of the same
+   * type within the share
+   *
+   * <p>Examples: - Valid for VOLUME: "data_schema.training_data" - Valid for FUNCTION:
+   * "analytics.calculate_revenue" - Invalid: "catalog.data_schema.training_data" (3 parts not
+   * allowed for volumes) - Invalid: "data-schema.training-data" (hyphens not allowed)
    */
   @JsonProperty("string_shared_as")
   private String stringSharedAs;