diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 3fbd741428..75afa4ea6d 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -033bcb9242b006001e2cf3956896711681de1a8c \ No newline at end of file +90fefb5618fdecf7dfdf6be7d56c2213d24ad944 \ No newline at end of file diff --git a/.codegen/service.go.tmpl b/.codegen/service.go.tmpl index 68c4ed64d5..78cb7fa4e2 100644 --- a/.codegen/service.go.tmpl +++ b/.codegen/service.go.tmpl @@ -161,7 +161,7 @@ func new{{.PascalName}}() *cobra.Command { {{else if .Entity.IsEmpty }}// TODO: output-only field {{else if .IsComputed -}} {{else if .IsOutputOnly -}} - {{else if .Entity.Enum }}cmd.Flags().Var(&{{- template "request-body-obj" (dict "Method" $method "Field" .)}}, "{{.KebabName}}", `{{.Summary | without "`" | trimSuffix "."}}. Supported values: {{template "printArray" .Entity.Enum}}`) + {{else if .Entity.Enum }}cmd.Flags().Var(&{{- template "request-body-obj" (dict "Method" $method "Field" .)}}, "{{.KebabName}}", `{{.Summary | without "`" | trimSuffix "."}}{{if .Summary}}. {{end}}Supported values: {{template "printArray" .Entity.Enum}}`) {{else}}cmd.Flags().{{template "arg-type" .Entity}}(&{{- template "request-body-obj" (dict "Method" $method "Field" .)}}, "{{.KebabName}}", {{- template "request-body-obj" (dict "Method" $method "Field" .)}}, `{{.Summary | without "`"}}`) {{end}} {{- end -}} diff --git a/.gitattributes b/.gitattributes index 9afa4ebd0d..f637688f98 100755 --- a/.gitattributes +++ b/.gitattributes @@ -70,6 +70,7 @@ cmd/workspace/dashboards/dashboards.go linguist-generated=true cmd/workspace/data-sources/data-sources.go linguist-generated=true cmd/workspace/database/database.go linguist-generated=true cmd/workspace/default-namespace/default-namespace.go linguist-generated=true +cmd/workspace/default-warehouse-id/default-warehouse-id.go linguist-generated=true cmd/workspace/disable-legacy-access/disable-legacy-access.go linguist-generated=true cmd/workspace/disable-legacy-dbfs/disable-legacy-dbfs.go linguist-generated=true cmd/workspace/enable-export-notebook/enable-export-notebook.go linguist-generated=true @@ -77,7 +78,9 @@ cmd/workspace/enable-notebook-table-clipboard/enable-notebook-table-clipboard.go cmd/workspace/enable-results-downloading/enable-results-downloading.go linguist-generated=true cmd/workspace/enhanced-security-monitoring/enhanced-security-monitoring.go linguist-generated=true cmd/workspace/experiments/experiments.go linguist-generated=true +cmd/workspace/external-lineage/external-lineage.go linguist-generated=true cmd/workspace/external-locations/external-locations.go linguist-generated=true +cmd/workspace/external-metadata/external-metadata.go linguist-generated=true cmd/workspace/feature-store/feature-store.go linguist-generated=true cmd/workspace/forecasting/forecasting.go linguist-generated=true cmd/workspace/functions/functions.go linguist-generated=true @@ -94,6 +97,7 @@ cmd/workspace/lakeview-embedded/lakeview-embedded.go linguist-generated=true cmd/workspace/lakeview/lakeview.go linguist-generated=true cmd/workspace/libraries/libraries.go linguist-generated=true cmd/workspace/llm-proxy-partner-powered-workspace/llm-proxy-partner-powered-workspace.go linguist-generated=true +cmd/workspace/materialized-features/materialized-features.go linguist-generated=true cmd/workspace/metastores/metastores.go linguist-generated=true cmd/workspace/model-registry/model-registry.go linguist-generated=true cmd/workspace/model-versions/model-versions.go linguist-generated=true @@ -130,6 
+134,7 @@ cmd/workspace/resource-quotas/resource-quotas.go linguist-generated=true cmd/workspace/restrict-workspace-admins/restrict-workspace-admins.go linguist-generated=true cmd/workspace/schemas/schemas.go linguist-generated=true cmd/workspace/secrets/secrets.go linguist-generated=true +cmd/workspace/service-principal-secrets-proxy/service-principal-secrets-proxy.go linguist-generated=true cmd/workspace/service-principals/service-principals.go linguist-generated=true cmd/workspace/serving-endpoints/serving-endpoints.go linguist-generated=true cmd/workspace/settings/settings.go linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index f789869b3e..791655c7f7 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -5,6 +5,7 @@ ### Notable Changes ### Dependency updates +* Upgrade Go SDK to 0.75.0 ([#3256](https://github.com/databricks/cli/pull/3256)) ### CLI diff --git a/acceptance/help/output.txt b/acceptance/help/output.txt index ca2a59a9d8..8ab460c891 100644 --- a/acceptance/help/output.txt +++ b/acceptance/help/output.txt @@ -57,7 +57,9 @@ Unity Catalog catalogs A catalog is the first layer of Unity Catalog’s three-level namespace. connections Connections allow for creating a connection to an external data source. credentials A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant. + external-lineage External Lineage APIs enable defining and managing lineage relationships between Databricks objects and external systems. external-locations An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path. + external-metadata External Metadata objects enable customers to register and manage metadata about external systems within Unity Catalog. functions Functions implement User-Defined Functions (UDFs) in Unity Catalog. grants In Unity Catalog, data is secure by default. metastores A metastore is the top-level container of objects in Unity Catalog. @@ -122,7 +124,7 @@ Apps Clean Rooms clean-room-assets Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room. clean-room-task-runs Clean room task runs are the executions of notebooks in a clean room. - clean-rooms A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data. + clean-rooms A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other's data. Database database Database Instances provide access to a database via REST API or direct SQL. @@ -130,6 +132,9 @@ Database Quality Monitor v2 quality-monitor-v2 Manage data quality of UC objects (currently support schema). +OAuth + service-principal-secrets-proxy These APIs enable administrators to manage service principal secrets at the workspace level. 
+ Additional Commands: account Databricks Account Commands api Perform Databricks API call diff --git a/bundle/config/variable/resolve_dashboard.go b/bundle/config/variable/resolve_dashboard.go index 2979716ce1..63770011fd 100644 --- a/bundle/config/variable/resolve_dashboard.go +++ b/bundle/config/variable/resolve_dashboard.go @@ -2,8 +2,10 @@ package variable import ( "context" + "fmt" "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/service/sql" ) type resolveDashboard struct { @@ -11,11 +13,26 @@ type resolveDashboard struct { } func (l resolveDashboard) Resolve(ctx context.Context, w *databricks.WorkspaceClient) (string, error) { - entity, err := w.Dashboards.GetByName(ctx, l.name) + // List dashboards and find the one with the given name + // If there are multiple dashboards with the same name, return an error + dashboards, err := w.Dashboards.ListAll(ctx, sql.ListDashboardsRequest{}) if err != nil { return "", err } - return entity.Id, nil + + dashboardMap := make(map[string][]sql.Dashboard) + for _, dashboard := range dashboards { + dashboardMap[dashboard.Name] = append(dashboardMap[dashboard.Name], dashboard) + } + + alternatives, ok := dashboardMap[l.name] + if !ok || len(alternatives) == 0 { + return "", fmt.Errorf("dashboard name '%s' does not exist", l.name) + } + if len(alternatives) > 1 { + return "", fmt.Errorf("there are %d instances of dashboards named '%s'", len(alternatives), l.name) + } + return alternatives[0].Id, nil } func (l resolveDashboard) String() string { diff --git a/bundle/config/variable/resolve_dashboard_test.go b/bundle/config/variable/resolve_dashboard_test.go index 3afed47941..f882e716be 100644 --- a/bundle/config/variable/resolve_dashboard_test.go +++ b/bundle/config/variable/resolve_dashboard_test.go @@ -4,7 +4,6 @@ import ( "context" "testing" - "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/experimental/mocks" "github.com/databricks/databricks-sdk-go/service/sql" "github.com/stretchr/testify/assert" @@ -17,9 +16,10 @@ func TestResolveDashboard_ResolveSuccess(t *testing.T) { api := m.GetMockDashboardsAPI() api.EXPECT(). - GetByName(mock.Anything, "dashboard"). - Return(&sql.Dashboard{ - Id: "1234", + ListAll(mock.Anything, mock.Anything). + Return([]sql.Dashboard{ + {Id: "1234", Name: "dashboard"}, + {Id: "5678", Name: "dashboard2"}, }, nil) ctx := context.Background() @@ -34,13 +34,16 @@ func TestResolveDashboard_ResolveNotFound(t *testing.T) { api := m.GetMockDashboardsAPI() api.EXPECT(). - GetByName(mock.Anything, "dashboard"). - Return(nil, &apierr.APIError{StatusCode: 404}) + ListAll(mock.Anything, mock.Anything). + Return([]sql.Dashboard{ + {Id: "1234", Name: "dashboard1"}, + {Id: "5678", Name: "dashboard2"}, + }, nil) ctx := context.Background() l := resolveDashboard{name: "dashboard"} _, err := l.Resolve(ctx, m.WorkspaceClient) - require.ErrorIs(t, err, apierr.ErrNotFound) + require.ErrorContains(t, err, "dashboard name 'dashboard' does not exist") } func TestResolveDashboard_String(t *testing.T) { diff --git a/bundle/internal/schema/annotations_openapi.yml b/bundle/internal/schema/annotations_openapi.yml index 7ab7cbc9ca..34b7c43cc2 100644 --- a/bundle/internal/schema/annotations_openapi.yml +++ b/bundle/internal/schema/annotations_openapi.yml @@ -365,9 +365,9 @@ github.com/databricks/cli/bundle/config/resources.Job: The queue settings of the job. "run_as": "description": |- - Write-only setting. Specifies the user or service principal that the job runs as. 
If not specified, the job runs as the user who created the job. - - Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown. + The user or service principal that the job runs as, if specified in the request. + This field indicates the explicit configuration of `run_as` for the job. + To find the value in all cases, explicit or implicit, use `run_as_user_name`. "schedule": "description": |- An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. @@ -434,6 +434,7 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint: "config": "description": |- The core config of the serving endpoint. + "description": {} "name": "description": |- The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. @@ -486,8 +487,6 @@ github.com/databricks/cli/bundle/config/resources.Pipeline: "environment": "description": |- Environment specification for this pipeline used to install dependencies. - "x-databricks-preview": |- - PRIVATE "event_log": "description": |- Event log configuration for this pipeline @@ -527,8 +526,6 @@ github.com/databricks/cli/bundle/config/resources.Pipeline: Root path for this pipeline. This is used as the root directory when editing the pipeline in the Databricks user interface and it is added to sys.path when executing Python sources during pipeline execution. - "x-databricks-preview": |- - PRIVATE "run_as": "description": |- Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. @@ -718,6 +715,7 @@ github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus: "description": |- State of the deployment. github.com/databricks/databricks-sdk-go/service/apps.AppResource: + "database": {} "description": "description": |- Description of the App Resource. @@ -729,6 +727,15 @@ github.com/databricks/databricks-sdk-go/service/apps.AppResource: "serving_endpoint": {} "sql_warehouse": {} "uc_securable": {} +github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabase: + "database_name": {} + "instance_name": {} + "permission": {} +github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabaseDatabasePermission: + "_": + "enum": + - |- + CAN_CONNECT_AND_CREATE github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob: "id": "description": |- @@ -2790,6 +2797,9 @@ github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag: "description": |- The tag value. github.com/databricks/databricks-sdk-go/service/ml.ModelTag: + "_": + "description": |- + Tag for a registered model "key": "description": |- The tag key. @@ -2910,6 +2920,8 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionSourceType: SQLSERVER - |- SALESFORCE + - |- + BIGQUERY - |- NETSUITE - |- @@ -2928,6 +2940,8 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionSourceType: SHAREPOINT - |- DYNAMICS365 + - |- + CONFLUENCE github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger: {} github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary: "path": @@ -3092,8 +3106,6 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary: The unified field to include source codes. 
Each entry can be a notebook path, a file path, or a folder path that ends `/**`. This field cannot be used together with `notebook` or `file`. - "x-databricks-preview": |- - PRIVATE "jar": "description": |- URI of the jar to be installed. Currently only DBFS is supported. @@ -3243,6 +3255,8 @@ github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScd SCD_TYPE_1 - |- SCD_TYPE_2 + - |- + APPEND_ONLY github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig: "ai21labs_api_key": "description": |- @@ -3280,6 +3294,8 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParame "description": |- List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content. + "deprecation_message": |- + This field is deprecated "pii": "description": |- Configuration for guardrail PII filter. @@ -3290,6 +3306,8 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParame "description": |- The list of allowed topics. Given a chat request, this guardrail flags the request if its topic is not in the allowed topics. + "deprecation_message": |- + This field is deprecated github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior: "behavior": "description": |- @@ -3330,8 +3348,11 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit: Used to specify how many calls are allowed for a key within the renewal_period. "key": "description": |- - Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, + Key field for a rate limit. Currently, 'user', 'user_group, 'service_principal', and 'endpoint' are supported, with 'endpoint' being the default if not specified. + "principal": + "description": |- + Principal field for a user, user group, or service principal to apply rate limiting to. Accepts a user email, group name, or service principal application ID. "renewal_period": "description": |- Renewal period field for a rate limit. Currently, only 'minute' is supported. @@ -3342,6 +3363,10 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey: user - |- endpoint + - |- + user_group + - |- + service_principal github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod: "_": "enum": @@ -3728,6 +3753,7 @@ github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod: - |- minute github.com/databricks/databricks-sdk-go/service/serving.Route: + "served_entity_name": {} "served_model_name": "description": |- The name of the served model this route configures traffic for. @@ -3846,6 +3872,9 @@ github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig: "description": |- The list of routes that define traffic to each served entity. github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata: + "_": + "description": |- + The metadata of the Azure KeyVault for a secret scope of type `AZURE_KEYVAULT` "dns_name": "description": |- The DNS of the KeyVault @@ -3854,6 +3883,9 @@ github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretSco The resource id of the azure KeyVault that user wants to associate the scope with. github.com/databricks/databricks-sdk-go/service/workspace.ScopeBackendType: "_": + "description": |- + The types of secret scope backends in the Secret Manager. Azure KeyVault backed secret scopes + will be supported in a later release. 
"enum": - |- DATABRICKS diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml index b9347d9ccc..b80b4840b8 100644 --- a/bundle/internal/schema/annotations_openapi_overrides.yml +++ b/bundle/internal/schema/annotations_openapi_overrides.yml @@ -254,6 +254,9 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint: - key: "team" value: "data science" ``` + "description": + "description": |- + PLACEHOLDER "permissions": "description": |- PLACEHOLDER @@ -493,6 +496,9 @@ github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus: "description": |- PLACEHOLDER github.com/databricks/databricks-sdk-go/service/apps.AppResource: + "database": + "description": |- + PLACEHOLDER "job": "description": |- PLACEHOLDER @@ -508,6 +514,16 @@ github.com/databricks/databricks-sdk-go/service/apps.AppResource: "uc_securable": "description": |- PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabase: + "database_name": + "description": |- + PLACEHOLDER + "instance_name": + "description": |- + PLACEHOLDER + "permission": + "description": |- + PLACEHOLDER github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob: "id": "description": |- @@ -700,6 +716,10 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger: "manual": "description": |- PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/serving.Route: + "served_entity_name": + "description": |- + PLACEHOLDER github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput: "entity_version": "description": |- diff --git a/bundle/internal/schema/parser.go b/bundle/internal/schema/parser.go index 51c8128ff9..8db276f061 100644 --- a/bundle/internal/schema/parser.go +++ b/bundle/internal/schema/parser.go @@ -174,13 +174,26 @@ func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overrid refProp.DeprecationMessage = "This field is deprecated" } + description := refProp.Description + + // If the field doesn't have a description, try to find the referenced type + // and use its description. This handles cases where the field references + // a type that has a description but the field itself doesn't. + if description == "" && refProp.Reference != nil { + refPath := *refProp.Reference + refTypeName := strings.TrimPrefix(refPath, "#/components/schemas/") + if refType, ok := p.ref[refTypeName]; ok { + description = refType.Description + } + } + pkg[k] = annotation.Descriptor{ - Description: refProp.Description, + Description: description, Enum: refProp.Enum, Preview: preview, DeprecationMessage: refProp.DeprecationMessage, } - if refProp.Description == "" { + if description == "" { addEmptyOverride(k, basePath, overrides) } } else { diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 4dca201511..33fd5daf67 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -870,6 +870,9 @@ "description": "The core config of the serving endpoint.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput" }, + "description": { + "$ref": "#/$defs/string" + }, "name": { "description": "The name of the serving endpoint. 
This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.", "$ref": "#/$defs/string" @@ -1023,9 +1026,7 @@ }, "environment": { "description": "Environment specification for this pipeline used to install dependencies.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelinesEnvironment", - "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelinesEnvironment" }, "event_log": { "description": "Event log configuration for this pipeline", @@ -1076,9 +1077,7 @@ }, "root_path": { "description": "Root path for this pipeline.\nThis is used as the root directory when editing the pipeline in the Databricks user interface and it is\nadded to sys.path when executing Python sources during pipeline execution.", - "$ref": "#/$defs/string", - "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "$ref": "#/$defs/string" }, "run_as": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RunAs", @@ -2286,6 +2285,9 @@ { "type": "object", "properties": { + "database": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabase" + }, "description": { "description": "Description of the App Resource.", "$ref": "#/$defs/string" @@ -2321,6 +2323,48 @@ } ] }, + "apps.AppResourceDatabase": { + "oneOf": [ + { + "type": "object", + "properties": { + "database_name": { + "$ref": "#/$defs/string" + }, + "instance_name": { + "$ref": "#/$defs/string" + }, + "permission": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabaseDatabasePermission" + } + }, + "additionalProperties": false, + "required": [ + "database_name", + "instance_name", + "permission" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppResourceDatabaseDatabasePermission": { + "oneOf": [ + { + "type": "string", + "enum": [ + "CAN_CONNECT_AND_CREATE" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "apps.AppResourceJob": { "oneOf": [ { @@ -5822,6 +5866,7 @@ "oneOf": [ { "type": "object", + "description": "Tag for a registered model", "properties": { "key": { "description": "The tag key.", @@ -6073,6 +6118,7 @@ "POSTGRESQL", "SQLSERVER", "SALESFORCE", + "BIGQUERY", "NETSUITE", "WORKDAY_RAAS", "GA4_RAW_DATA", @@ -6081,7 +6127,8 @@ "ORACLE", "TERADATA", "SHAREPOINT", - "DYNAMICS365" + "DYNAMICS365", + "CONFLUENCE" ] }, { @@ -6332,9 +6379,7 @@ }, "glob": { "description": "The unified field to include source codes.\nEach entry can be a notebook path, a file path, or a folder path that ends `/**`.\nThis field cannot be used together with `notebook` or `file`.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PathPattern", - "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PathPattern" }, "jar": { "description": "URI of the jar to be installed. 
Currently only DBFS is supported.", @@ -6632,7 +6677,8 @@ "description": "The SCD type to use to ingest the table.", "enum": [ "SCD_TYPE_1", - "SCD_TYPE_2" + "SCD_TYPE_2", + "APPEND_ONLY" ] }, { @@ -6704,7 +6750,9 @@ "properties": { "invalid_keywords": { "description": "List of invalid keywords.\nAI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "deprecationMessage": "This field is deprecated", + "deprecated": true }, "pii": { "description": "Configuration for guardrail PII filter.", @@ -6716,7 +6764,9 @@ }, "valid_topics": { "description": "The list of allowed topics.\nGiven a chat request, this guardrail flags the request if its topic is not in the allowed topics.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "deprecationMessage": "This field is deprecated", + "deprecated": true } }, "additionalProperties": false @@ -6822,9 +6872,13 @@ "$ref": "#/$defs/int64" }, "key": { - "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported,\nwith 'endpoint' being the default if not specified.", + "description": "Key field for a rate limit. Currently, 'user', 'user_group, 'service_principal', and 'endpoint' are supported,\nwith 'endpoint' being the default if not specified.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey" }, + "principal": { + "description": "Principal field for a user, user group, or service principal to apply rate limiting to. Accepts a user email, group name, or service principal application ID.", + "$ref": "#/$defs/string" + }, "renewal_period": { "description": "Renewal period field for a rate limit. Currently, only 'minute' is supported.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod" @@ -6832,7 +6886,6 @@ }, "additionalProperties": false, "required": [ - "calls", "renewal_period" ] }, @@ -6848,7 +6901,9 @@ "type": "string", "enum": [ "user", - "endpoint" + "endpoint", + "user_group", + "service_principal" ] }, { @@ -7484,6 +7539,9 @@ { "type": "object", "properties": { + "served_entity_name": { + "$ref": "#/$defs/string" + }, "served_model_name": { "description": "The name of the served model this route configures traffic for.", "$ref": "#/$defs/string" @@ -7495,7 +7553,6 @@ }, "additionalProperties": false, "required": [ - "served_model_name", "traffic_percentage" ] }, @@ -7703,6 +7760,7 @@ "oneOf": [ { "type": "object", + "description": "The metadata of the Azure KeyVault for a secret scope of type `AZURE_KEYVAULT`", "properties": { "dns_name": { "description": "The DNS of the KeyVault", @@ -7729,6 +7787,7 @@ "oneOf": [ { "type": "string", + "description": "The types of secret scope backends in the Secret Manager. Azure KeyVault backed secret scopes\nwill be supported in a later release.", "enum": [ "DATABRICKS", "AZURE_KEYVAULT" diff --git a/cmd/account/billable-usage/billable-usage.go b/cmd/account/billable-usage/billable-usage.go index 201020e402..4e7949336d 100755 --- a/cmd/account/billable-usage/billable-usage.go +++ b/cmd/account/billable-usage/billable-usage.go @@ -59,19 +59,23 @@ func newDownload() *cobra.Command { cmd.Long = `Return billable usage logs. Returns billable usage logs in CSV format for the specified account and date - range. For the data schema, see [CSV file schema]. Note that this method might - take multiple minutes to complete. + range. 
For the data schema, see: + + - AWS: [CSV file schema]. - GCP: [CSV file schema]. + + Note that this method might take multiple minutes to complete. **Warning**: Depending on the queried date range, the number of workspaces in the account, the size of the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If you experience this, try to mitigate by calling the API with narrower date ranges. - [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema + [CSV file schema]: https://docs.gcp.databricks.com/administration-guide/account-settings/usage-analysis.html#csv-file-schema Arguments: - START_MONTH: Format: YYYY-MM. First month to return billable usage logs for. This - field is required. + START_MONTH: Format specification for month in the format YYYY-MM. This is used to + specify billable usage start_month and end_month properties. **Note**: + Billable usage logs are unavailable before March 2019 (2019-03). END_MONTH: Format: YYYY-MM. Last month to return billable usage logs for. This field is required.` diff --git a/cmd/account/ip-access-lists/ip-access-lists.go b/cmd/account/ip-access-lists/ip-access-lists.go index 479afc1a24..2063c5f86c 100755 --- a/cmd/account/ip-access-lists/ip-access-lists.go +++ b/cmd/account/ip-access-lists/ip-access-lists.go @@ -106,12 +106,7 @@ func newCreate() *cobra.Command { Arguments: LABEL: Label for the IP access list. This **cannot** be empty. - LIST_TYPE: Type of IP access list. Valid values are as follows and are - case-sensitive: - - * ALLOW: An allow list. Include this IP or range. * BLOCK: A block - list. Exclude this IP or range. IP addresses in the block list are - excluded even if they are included in an allow list. + LIST_TYPE: Supported values: [ALLOW, BLOCK]` cmd.Annotations = make(map[string]string) @@ -386,12 +381,7 @@ func newReplace() *cobra.Command { Arguments: IP_ACCESS_LIST_ID: The ID for the corresponding IP access list LABEL: Label for the IP access list. This **cannot** be empty. - LIST_TYPE: Type of IP access list. Valid values are as follows and are - case-sensitive: - - * ALLOW: An allow list. Include this IP or range. * BLOCK: A block - list. Exclude this IP or range. IP addresses in the block list are - excluded even if they are included in an allow list. + LIST_TYPE: Supported values: [ALLOW, BLOCK] ENABLED: Specifies whether this IP access list is enabled.` @@ -482,7 +472,7 @@ func newUpdate() *cobra.Command { cmd.Flags().BoolVar(&updateReq.Enabled, "enabled", updateReq.Enabled, `Specifies whether this IP access list is enabled.`) // TODO: array: ip_addresses cmd.Flags().StringVar(&updateReq.Label, "label", updateReq.Label, `Label for the IP access list.`) - cmd.Flags().Var(&updateReq.ListType, "list-type", `Type of IP access list. 
Supported values: [ALLOW, BLOCK]`) + cmd.Flags().Var(&updateReq.ListType, "list-type", `Supported values: [ALLOW, BLOCK]`) cmd.Use = "update IP_ACCESS_LIST_ID" cmd.Short = `Update access list.` diff --git a/cmd/account/private-access/private-access.go b/cmd/account/private-access/private-access.go index 4990af8a5e..ece5203cdb 100755 --- a/cmd/account/private-access/private-access.go +++ b/cmd/account/private-access/private-access.go @@ -62,7 +62,7 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: allowed_vpc_endpoint_ids - cmd.Flags().Var(&createReq.PrivateAccessLevel, "private-access-level", `The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object. Supported values: [ACCOUNT, ENDPOINT]`) + cmd.Flags().Var(&createReq.PrivateAccessLevel, "private-access-level", `Supported values: [ACCOUNT, ENDPOINT]`) cmd.Flags().BoolVar(&createReq.PublicAccessEnabled, "public-access-enabled", createReq.PublicAccessEnabled, `Determines if the workspace can be accessed over public internet.`) cmd.Use = "create PRIVATE_ACCESS_SETTINGS_NAME REGION" @@ -358,7 +358,7 @@ func newReplace() *cobra.Command { cmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: allowed_vpc_endpoint_ids - cmd.Flags().Var(&replaceReq.PrivateAccessLevel, "private-access-level", `The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object. Supported values: [ACCOUNT, ENDPOINT]`) + cmd.Flags().Var(&replaceReq.PrivateAccessLevel, "private-access-level", `Supported values: [ACCOUNT, ENDPOINT]`) cmd.Flags().BoolVar(&replaceReq.PublicAccessEnabled, "public-access-enabled", replaceReq.PublicAccessEnabled, `Determines if the workspace can be accessed over public internet.`) cmd.Use = "replace PRIVATE_ACCESS_SETTINGS_ID PRIVATE_ACCESS_SETTINGS_NAME REGION" diff --git a/cmd/account/service-principal-secrets/service-principal-secrets.go b/cmd/account/service-principal-secrets/service-principal-secrets.go index f24e63fa56..f811c3f6dd 100755 --- a/cmd/account/service-principal-secrets/service-principal-secrets.go +++ b/cmd/account/service-principal-secrets/service-principal-secrets.go @@ -3,8 +3,6 @@ package service_principal_secrets import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdctx" "github.com/databricks/cli/libs/cmdio" @@ -26,7 +24,7 @@ func New() *cobra.Command { You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using OAuth tokens for service - principals], + principals]. In addition, the generated secrets can be used to configure the Databricks Terraform Provider to authenticate with the service principal. 
For more @@ -106,10 +104,7 @@ func newCreate() *cobra.Command { } } } - _, err = fmt.Sscan(args[0], &createReq.ServicePrincipalId) - if err != nil { - return fmt.Errorf("invalid SERVICE_PRINCIPAL_ID: %s", args[0]) - } + createReq.ServicePrincipalId = args[0] response, err := a.ServicePrincipalSecrets.Create(ctx, createReq) if err != nil { @@ -166,10 +161,7 @@ func newDelete() *cobra.Command { ctx := cmd.Context() a := cmdctx.AccountClient(ctx) - _, err = fmt.Sscan(args[0], &deleteReq.ServicePrincipalId) - if err != nil { - return fmt.Errorf("invalid SERVICE_PRINCIPAL_ID: %s", args[0]) - } + deleteReq.ServicePrincipalId = args[0] deleteReq.SecretId = args[1] err = a.ServicePrincipalSecrets.Delete(ctx, deleteReq) @@ -205,6 +197,7 @@ func newList() *cobra.Command { var listReq oauth2.ListServicePrincipalSecretsRequest + cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, ``) cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `An opaque page token which was the next_page_token in the response of the previous request to list the secrets for this service principal.`) cmd.Use = "list SERVICE_PRINCIPAL_ID" @@ -230,10 +223,7 @@ func newList() *cobra.Command { ctx := cmd.Context() a := cmdctx.AccountClient(ctx) - _, err = fmt.Sscan(args[0], &listReq.ServicePrincipalId) - if err != nil { - return fmt.Errorf("invalid SERVICE_PRINCIPAL_ID: %s", args[0]) - } + listReq.ServicePrincipalId = args[0] response := a.ServicePrincipalSecrets.List(ctx, listReq) return cmdio.RenderIterator(ctx, response) diff --git a/cmd/account/workspaces/workspaces.go b/cmd/account/workspaces/workspaces.go index 577b45bb12..97641f3af4 100755 --- a/cmd/account/workspaces/workspaces.go +++ b/cmd/account/workspaces/workspaces.go @@ -87,7 +87,7 @@ func newCreate() *cobra.Command { cmd.Flags().StringVar(&createReq.Location, "location", createReq.Location, `The Google Cloud region of the workspace data plane in your Google account.`) cmd.Flags().StringVar(&createReq.ManagedServicesCustomerManagedKeyId, "managed-services-customer-managed-key-id", createReq.ManagedServicesCustomerManagedKeyId, `The ID of the workspace's managed services encryption key configuration object.`) cmd.Flags().StringVar(&createReq.NetworkId, "network-id", createReq.NetworkId, ``) - cmd.Flags().Var(&createReq.PricingTier, "pricing-tier", `The pricing tier of the workspace. Supported values: [ + cmd.Flags().Var(&createReq.PricingTier, "pricing-tier", `Supported values: [ COMMUNITY_EDITION, DEDICATED, ENTERPRISE, diff --git a/cmd/workspace/alerts-legacy/alerts-legacy.go b/cmd/workspace/alerts-legacy/alerts-legacy.go index f598c0d128..c5cded70f1 100755 --- a/cmd/workspace/alerts-legacy/alerts-legacy.go +++ b/cmd/workspace/alerts-legacy/alerts-legacy.go @@ -156,28 +156,16 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ALERT_ID argument specified. Loading names for Alerts Legacy drop-down." - names, err := w.AlertsLegacy.LegacyAlertNameToIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Alerts Legacy drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } deleteReq.AlertId = args[0] err = w.AlertsLegacy.Delete(ctx, deleteReq) @@ -226,28 +214,16 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ALERT_ID argument specified. Loading names for Alerts Legacy drop-down." - names, err := w.AlertsLegacy.LegacyAlertNameToIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Alerts Legacy drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } getReq.AlertId = args[0] response, err := w.AlertsLegacy.Get(ctx, getReq) diff --git a/cmd/workspace/alerts-v2/alerts-v2.go b/cmd/workspace/alerts-v2/alerts-v2.go index e5d9994f92..d708232475 100755 --- a/cmd/workspace/alerts-v2/alerts-v2.go +++ b/cmd/workspace/alerts-v2/alerts-v2.go @@ -68,6 +68,7 @@ func newCreateAlert() *cobra.Command { // TODO: complex arg: evaluation cmd.Flags().StringVar(&createAlertReq.Alert.ParentPath, "parent-path", createAlertReq.Alert.ParentPath, `The workspace path of the folder containing the alert.`) cmd.Flags().StringVar(&createAlertReq.Alert.QueryText, "query-text", createAlertReq.Alert.QueryText, `Text of the query to be run.`) + cmd.Flags().StringVar(&createAlertReq.Alert.RunAsUserName, "run-as-user-name", createAlertReq.Alert.RunAsUserName, `The run as username or application ID of service principal.`) // TODO: complex arg: schedule cmd.Flags().StringVar(&createAlertReq.Alert.WarehouseId, "warehouse-id", createAlertReq.Alert.WarehouseId, `ID of the SQL warehouse attached to the alert.`) @@ -328,6 +329,7 @@ func newUpdateAlert() *cobra.Command { // TODO: complex arg: evaluation cmd.Flags().StringVar(&updateAlertReq.Alert.ParentPath, "parent-path", updateAlertReq.Alert.ParentPath, `The workspace path of the folder containing the alert.`) cmd.Flags().StringVar(&updateAlertReq.Alert.QueryText, "query-text", updateAlertReq.Alert.QueryText, `Text of the query to be run.`) + cmd.Flags().StringVar(&updateAlertReq.Alert.RunAsUserName, "run-as-user-name", updateAlertReq.Alert.RunAsUserName, `The run as username or application ID of service principal.`) // TODO: complex arg: schedule cmd.Flags().StringVar(&updateAlertReq.Alert.WarehouseId, "warehouse-id", updateAlertReq.Alert.WarehouseId, `ID of the SQL warehouse attached to the alert.`) diff --git a/cmd/workspace/clean-rooms/clean-rooms.go b/cmd/workspace/clean-rooms/clean-rooms.go index 69835a14e4..71c003c21f 100755 --- a/cmd/workspace/clean-rooms/clean-rooms.go +++ b/cmd/workspace/clean-rooms/clean-rooms.go @@ -18,10 +18,10 @@ var cmdOverrides []func(*cobra.Command) func New() *cobra.Command { cmd := &cobra.Command{ Use: "clean-rooms", - Short: `A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive 
enterprise data without direct access to each other’s data.`, + Short: `A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other's data.`, Long: `A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on - sensitive enterprise data without direct access to each other’s data.`, + sensitive enterprise data without direct access to each other's data.`, GroupID: "cleanrooms", Annotations: map[string]string{ "package": "cleanrooms", diff --git a/cmd/workspace/clusters/clusters.go b/cmd/workspace/clusters/clusters.go index 908f007c1a..72cbe890d3 100755 --- a/cmd/workspace/clusters/clusters.go +++ b/cmd/workspace/clusters/clusters.go @@ -204,7 +204,7 @@ func newCreate() *cobra.Command { // TODO: complex arg: cluster_log_conf cmd.Flags().StringVar(&createReq.ClusterName, "cluster-name", createReq.ClusterName, `Cluster name requested by the user.`) // TODO: map via StringToStringVar: custom_tags - cmd.Flags().Var(&createReq.DataSecurityMode, "data-security-mode", `Data security mode decides what data governance model to use when accessing data from a cluster. Supported values: [ + cmd.Flags().Var(&createReq.DataSecurityMode, "data-security-mode", `Supported values: [ DATA_SECURITY_MODE_AUTO, DATA_SECURITY_MODE_DEDICATED, DATA_SECURITY_MODE_STANDARD, @@ -225,7 +225,7 @@ func newCreate() *cobra.Command { // TODO: array: init_scripts cmd.Flags().StringVar(&createReq.InstancePoolId, "instance-pool-id", createReq.InstancePoolId, `The optional ID of the instance pool to which the cluster belongs.`) cmd.Flags().BoolVar(&createReq.IsSingleNode, "is-single-node", createReq.IsSingleNode, `This field can only be used when kind = CLASSIC_PREVIEW.`) - cmd.Flags().Var(&createReq.Kind, "kind", `The kind of compute described by this compute specification. Supported values: [CLASSIC_PREVIEW]`) + cmd.Flags().Var(&createReq.Kind, "kind", `Supported values: [CLASSIC_PREVIEW]`) cmd.Flags().StringVar(&createReq.NodeTypeId, "node-type-id", createReq.NodeTypeId, `This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.`) cmd.Flags().IntVar(&createReq.NumWorkers, "num-workers", createReq.NumWorkers, `Number of worker nodes that this cluster should have.`) cmd.Flags().StringVar(&createReq.PolicyId, "policy-id", createReq.PolicyId, `The ID of the cluster policy used to create the cluster if applicable.`) @@ -479,7 +479,7 @@ func newEdit() *cobra.Command { // TODO: complex arg: cluster_log_conf cmd.Flags().StringVar(&editReq.ClusterName, "cluster-name", editReq.ClusterName, `Cluster name requested by the user.`) // TODO: map via StringToStringVar: custom_tags - cmd.Flags().Var(&editReq.DataSecurityMode, "data-security-mode", `Data security mode decides what data governance model to use when accessing data from a cluster. 
Supported values: [ + cmd.Flags().Var(&editReq.DataSecurityMode, "data-security-mode", `Supported values: [ DATA_SECURITY_MODE_AUTO, DATA_SECURITY_MODE_DEDICATED, DATA_SECURITY_MODE_STANDARD, @@ -500,7 +500,7 @@ func newEdit() *cobra.Command { // TODO: array: init_scripts cmd.Flags().StringVar(&editReq.InstancePoolId, "instance-pool-id", editReq.InstancePoolId, `The optional ID of the instance pool to which the cluster belongs.`) cmd.Flags().BoolVar(&editReq.IsSingleNode, "is-single-node", editReq.IsSingleNode, `This field can only be used when kind = CLASSIC_PREVIEW.`) - cmd.Flags().Var(&editReq.Kind, "kind", `The kind of compute described by this compute specification. Supported values: [CLASSIC_PREVIEW]`) + cmd.Flags().Var(&editReq.Kind, "kind", `Supported values: [CLASSIC_PREVIEW]`) cmd.Flags().StringVar(&editReq.NodeTypeId, "node-type-id", editReq.NodeTypeId, `This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.`) cmd.Flags().IntVar(&editReq.NumWorkers, "num-workers", editReq.NumWorkers, `Number of worker nodes that this cluster should have.`) cmd.Flags().StringVar(&editReq.PolicyId, "policy-id", editReq.PolicyId, `The ID of the cluster policy used to create the cluster if applicable.`) diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go index e84487843d..3203f3176e 100755 --- a/cmd/workspace/cmd.go +++ b/cmd/workspace/cmd.go @@ -30,7 +30,9 @@ import ( data_sources "github.com/databricks/cli/cmd/workspace/data-sources" database "github.com/databricks/cli/cmd/workspace/database" experiments "github.com/databricks/cli/cmd/workspace/experiments" + external_lineage "github.com/databricks/cli/cmd/workspace/external-lineage" external_locations "github.com/databricks/cli/cmd/workspace/external-locations" + external_metadata "github.com/databricks/cli/cmd/workspace/external-metadata" feature_store "github.com/databricks/cli/cmd/workspace/feature-store" forecasting "github.com/databricks/cli/cmd/workspace/forecasting" functions "github.com/databricks/cli/cmd/workspace/functions" @@ -46,6 +48,7 @@ import ( lakeview "github.com/databricks/cli/cmd/workspace/lakeview" lakeview_embedded "github.com/databricks/cli/cmd/workspace/lakeview-embedded" libraries "github.com/databricks/cli/cmd/workspace/libraries" + materialized_features "github.com/databricks/cli/cmd/workspace/materialized-features" metastores "github.com/databricks/cli/cmd/workspace/metastores" model_registry "github.com/databricks/cli/cmd/workspace/model-registry" model_versions "github.com/databricks/cli/cmd/workspace/model-versions" @@ -81,6 +84,7 @@ import ( resource_quotas "github.com/databricks/cli/cmd/workspace/resource-quotas" schemas "github.com/databricks/cli/cmd/workspace/schemas" secrets "github.com/databricks/cli/cmd/workspace/secrets" + service_principal_secrets_proxy "github.com/databricks/cli/cmd/workspace/service-principal-secrets-proxy" service_principals "github.com/databricks/cli/cmd/workspace/service-principals" serving_endpoints "github.com/databricks/cli/cmd/workspace/serving-endpoints" settings "github.com/databricks/cli/cmd/workspace/settings" @@ -133,7 +137,9 @@ func All() []*cobra.Command { out = append(out, data_sources.New()) out = append(out, database.New()) out = append(out, experiments.New()) + out = append(out, external_lineage.New()) out = append(out, external_locations.New()) + out = append(out, external_metadata.New()) out = append(out, feature_store.New()) out = append(out, functions.New()) out = append(out, genie.New()) @@ -148,6 +154,7 
@@ func All() []*cobra.Command { out = append(out, lakeview.New()) out = append(out, lakeview_embedded.New()) out = append(out, libraries.New()) + out = append(out, materialized_features.New()) out = append(out, metastores.New()) out = append(out, model_registry.New()) out = append(out, model_versions.New()) @@ -183,6 +190,7 @@ func All() []*cobra.Command { out = append(out, resource_quotas.New()) out = append(out, schemas.New()) out = append(out, secrets.New()) + out = append(out, service_principal_secrets_proxy.New()) out = append(out, service_principals.New()) out = append(out, serving_endpoints.New()) out = append(out, settings.New()) diff --git a/cmd/workspace/consumer-installations/consumer-installations.go b/cmd/workspace/consumer-installations/consumer-installations.go index d5abab4f7f..a686ad2ce9 100755 --- a/cmd/workspace/consumer-installations/consumer-installations.go +++ b/cmd/workspace/consumer-installations/consumer-installations.go @@ -64,7 +64,7 @@ func newCreate() *cobra.Command { // TODO: complex arg: accepted_consumer_terms cmd.Flags().StringVar(&createReq.CatalogName, "catalog-name", createReq.CatalogName, ``) - cmd.Flags().Var(&createReq.RecipientType, "recipient-type", `. Supported values: [DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS, DELTA_SHARING_RECIPIENT_TYPE_OPEN]`) + cmd.Flags().Var(&createReq.RecipientType, "recipient-type", `Supported values: [DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS, DELTA_SHARING_RECIPIENT_TYPE_OPEN]`) // TODO: complex arg: repo_detail cmd.Flags().StringVar(&createReq.ShareName, "share-name", createReq.ShareName, ``) diff --git a/cmd/workspace/consumer-personalization-requests/consumer-personalization-requests.go b/cmd/workspace/consumer-personalization-requests/consumer-personalization-requests.go index 47807dfb56..cfafe2072e 100755 --- a/cmd/workspace/consumer-personalization-requests/consumer-personalization-requests.go +++ b/cmd/workspace/consumer-personalization-requests/consumer-personalization-requests.go @@ -65,7 +65,7 @@ func newCreate() *cobra.Command { cmd.Flags().StringVar(&createReq.FirstName, "first-name", createReq.FirstName, ``) cmd.Flags().BoolVar(&createReq.IsFromLighthouse, "is-from-lighthouse", createReq.IsFromLighthouse, ``) cmd.Flags().StringVar(&createReq.LastName, "last-name", createReq.LastName, ``) - cmd.Flags().Var(&createReq.RecipientType, "recipient-type", `. Supported values: [DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS, DELTA_SHARING_RECIPIENT_TYPE_OPEN]`) + cmd.Flags().Var(&createReq.RecipientType, "recipient-type", `Supported values: [DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS, DELTA_SHARING_RECIPIENT_TYPE_OPEN]`) cmd.Use = "create LISTING_ID" cmd.Short = `Create a personalization request.` diff --git a/cmd/workspace/dashboard-widgets/dashboard-widgets.go b/cmd/workspace/dashboard-widgets/dashboard-widgets.go index fa92e12063..57273a3b2a 100755 --- a/cmd/workspace/dashboard-widgets/dashboard-widgets.go +++ b/cmd/workspace/dashboard-widgets/dashboard-widgets.go @@ -64,11 +64,14 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().StringVar(&createReq.Text, "text", createReq.Text, `If this is a textbox widget, the application displays this text.`) + cmd.Flags().StringVar(&createReq.VisualizationId, "visualization-id", createReq.VisualizationId, `Query Vizualization ID returned by :method:queryvisualizations/create.`) + cmd.Use = "create" cmd.Short = `Add widget to a dashboard.` cmd.Long = `Add widget to a dashboard. 
- Add widget to a dashboard` + Adds a widget to a dashboard` cmd.Annotations = make(map[string]string) @@ -129,7 +132,7 @@ func newDelete() *cobra.Command { cmd.Short = `Remove widget.` cmd.Long = `Remove widget. - Remove widget + Removes a widget from a dashboard Arguments: ID: Widget ID returned by :method:dashboardwidgets/create` @@ -173,22 +176,25 @@ func newDelete() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var updateOverrides []func( *cobra.Command, - *sql.CreateWidget, + *sql.UpdateWidgetRequest, ) func newUpdate() *cobra.Command { cmd := &cobra.Command{} - var updateReq sql.CreateWidget + var updateReq sql.UpdateWidgetRequest var updateJson flags.JsonFlag cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().StringVar(&updateReq.Text, "text", updateReq.Text, `If this is a textbox widget, the application displays this text.`) + cmd.Flags().StringVar(&updateReq.VisualizationId, "visualization-id", updateReq.VisualizationId, `Query Vizualization ID returned by :method:queryvisualizations/create.`) + cmd.Use = "update ID" cmd.Short = `Update existing widget.` cmd.Long = `Update existing widget. - Update existing widget + Updates an existing widget Arguments: ID: Widget ID returned by :method:dashboardwidgets/create` diff --git a/cmd/workspace/dashboards/dashboards.go b/cmd/workspace/dashboards/dashboards.go index 75e1e4b604..ce01dae8cd 100755 --- a/cmd/workspace/dashboards/dashboards.go +++ b/cmd/workspace/dashboards/dashboards.go @@ -67,12 +67,39 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - cmd.Use = "create" + cmd.Flags().BoolVar(&createReq.DashboardFiltersEnabled, "dashboard-filters-enabled", createReq.DashboardFiltersEnabled, `Indicates whether the dashboard filters are enabled.`) + cmd.Flags().BoolVar(&createReq.IsFavorite, "is-favorite", createReq.IsFavorite, `Indicates whether this dashboard object should appear in the current user's favorites list.`) + cmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the object.`) + cmd.Flags().Var(&createReq.RunAsRole, "run-as-role", `Sets the **Run as** role for the object. Supported values: [owner, viewer]`) + // TODO: array: tags + + cmd.Use = "create NAME" cmd.Short = `Create a dashboard object.` - cmd.Long = `Create a dashboard object.` + cmd.Long = `Create a dashboard object. + + Creates a new dashboard object. Only the name parameter is required in the + POST request JSON body. Other fields can be included when duplicating + dashboards with this API. Databricks does not recommend designing dashboards + exclusively using this API.', + + Arguments: + NAME: The title of this dashboard that appears in list views and at the top of + the dashboard page.` cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(0)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, no positional arguments are required. 
Provide 'name' in your JSON input") + } + return nil + } + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -89,8 +116,9 @@ func newCreate() *cobra.Command { return err } } - } else { - return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + if !cmd.Flags().Changed("json") { + createReq.Name = args[0] } response, err := w.Dashboards.Create(ctx, createReq) @@ -135,28 +163,16 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No DASHBOARD_ID argument specified. Loading names for Dashboards drop-down." - names, err := w.Dashboards.DashboardNameToIdMap(ctx, sql.ListDashboardsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Dashboards drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } deleteReq.DashboardId = args[0] err = w.Dashboards.Delete(ctx, deleteReq) @@ -201,28 +217,16 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No DASHBOARD_ID argument specified. Loading names for Dashboards drop-down." - names, err := w.Dashboards.DashboardNameToIdMap(ctx, sql.ListDashboardsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Dashboards drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } getReq.DashboardId = args[0] response, err := w.Dashboards.Get(ctx, getReq) @@ -322,28 +326,16 @@ func newRestore() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No DASHBOARD_ID argument specified. Loading names for Dashboards drop-down." - names, err := w.Dashboards.DashboardNameToIdMap(ctx, sql.ListDashboardsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Dashboards drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } restoreReq.DashboardId = args[0] err = w.Dashboards.Restore(ctx, restoreReq) @@ -397,6 +389,11 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -414,23 +411,6 @@ func newUpdate() *cobra.Command { } } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No DASHBOARD_ID argument specified. Loading names for Dashboards drop-down." - names, err := w.Dashboards.DashboardNameToIdMap(ctx, sql.ListDashboardsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Dashboards drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } updateReq.DashboardId = args[0] response, err := w.Dashboards.Update(ctx, updateReq) diff --git a/cmd/workspace/database/database.go b/cmd/workspace/database/database.go index 117887647c..eca487bfb8 100755 --- a/cmd/workspace/database/database.go +++ b/cmd/workspace/database/database.go @@ -32,18 +32,22 @@ func New() *cobra.Command { // Add methods cmd.AddCommand(newCreateDatabaseCatalog()) cmd.AddCommand(newCreateDatabaseInstance()) + cmd.AddCommand(newCreateDatabaseInstanceRole()) cmd.AddCommand(newCreateDatabaseTable()) cmd.AddCommand(newCreateSyncedDatabaseTable()) cmd.AddCommand(newDeleteDatabaseCatalog()) cmd.AddCommand(newDeleteDatabaseInstance()) + cmd.AddCommand(newDeleteDatabaseInstanceRole()) cmd.AddCommand(newDeleteDatabaseTable()) cmd.AddCommand(newDeleteSyncedDatabaseTable()) cmd.AddCommand(newFindDatabaseInstanceByUid()) cmd.AddCommand(newGenerateDatabaseCredential()) cmd.AddCommand(newGetDatabaseCatalog()) cmd.AddCommand(newGetDatabaseInstance()) + cmd.AddCommand(newGetDatabaseInstanceRole()) cmd.AddCommand(newGetDatabaseTable()) cmd.AddCommand(newGetSyncedDatabaseTable()) + cmd.AddCommand(newListDatabaseInstanceRoles()) cmd.AddCommand(newListDatabaseInstances()) cmd.AddCommand(newUpdateDatabaseInstance()) @@ -163,6 +167,11 @@ func newCreateDatabaseInstance() *cobra.Command { cmd.Flags().Var(&createDatabaseInstanceJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&createDatabaseInstanceReq.DatabaseInstance.Capacity, "capacity", createDatabaseInstanceReq.DatabaseInstance.Capacity, `The sku of the instance.`) + // TODO: array: child_instance_refs + cmd.Flags().BoolVar(&createDatabaseInstanceReq.DatabaseInstance.EnableReadableSecondaries, "enable-readable-secondaries", createDatabaseInstanceReq.DatabaseInstance.EnableReadableSecondaries, `Whether to enable secondaries to serve read-only traffic.`) + cmd.Flags().IntVar(&createDatabaseInstanceReq.DatabaseInstance.NodeCount, "node-count", createDatabaseInstanceReq.DatabaseInstance.NodeCount, `The number of nodes in the instance, composed of 1 primary and 0 or more secondaries.`) + // TODO: complex arg: parent_instance_ref + cmd.Flags().IntVar(&createDatabaseInstanceReq.DatabaseInstance.RetentionWindowInDays, 
"retention-window-in-days", createDatabaseInstanceReq.DatabaseInstance.RetentionWindowInDays, `The retention window for the instance.`) cmd.Flags().BoolVar(&createDatabaseInstanceReq.DatabaseInstance.Stopped, "stopped", createDatabaseInstanceReq.DatabaseInstance.Stopped, `Whether the instance is stopped.`) cmd.Use = "create-database-instance NAME" @@ -226,6 +235,81 @@ func newCreateDatabaseInstance() *cobra.Command { return cmd } +// start create-database-instance-role command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createDatabaseInstanceRoleOverrides []func( + *cobra.Command, + *database.CreateDatabaseInstanceRoleRequest, +) + +func newCreateDatabaseInstanceRole() *cobra.Command { + cmd := &cobra.Command{} + + var createDatabaseInstanceRoleReq database.CreateDatabaseInstanceRoleRequest + createDatabaseInstanceRoleReq.DatabaseInstanceRole = database.DatabaseInstanceRole{} + var createDatabaseInstanceRoleJson flags.JsonFlag + + cmd.Flags().Var(&createDatabaseInstanceRoleJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: complex arg: attributes + cmd.Flags().Var(&createDatabaseInstanceRoleReq.DatabaseInstanceRole.IdentityType, "identity-type", `The type of the role. Supported values: [GROUP, PG_ONLY, SERVICE_PRINCIPAL, USER]`) + cmd.Flags().Var(&createDatabaseInstanceRoleReq.DatabaseInstanceRole.MembershipRole, "membership-role", `An enum value for a standard role that this role is a member of. Supported values: [DATABRICKS_SUPERUSER]`) + cmd.Flags().StringVar(&createDatabaseInstanceRoleReq.DatabaseInstanceRole.Name, "name", createDatabaseInstanceRoleReq.DatabaseInstanceRole.Name, `The name of the role.`) + + cmd.Use = "create-database-instance-role INSTANCE_NAME" + cmd.Short = `Create a role for a Database Instance.` + cmd.Long = `Create a role for a Database Instance.` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createDatabaseInstanceRoleJson.Unmarshal(&createDatabaseInstanceRoleReq.DatabaseInstanceRole) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + createDatabaseInstanceRoleReq.InstanceName = args[0] + + response, err := w.Database.CreateDatabaseInstanceRole(ctx, createDatabaseInstanceRoleReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createDatabaseInstanceRoleOverrides { + fn(cmd, &createDatabaseInstanceRoleReq) + } + + return cmd +} + // start create-database-table command // Slice with functions to override default command behavior. 
@@ -250,6 +334,10 @@ func newCreateDatabaseTable() *cobra.Command { cmd.Use = "create-database-table NAME" cmd.Short = `Create a Database Table.` cmd.Long = `Create a Database Table. + + Create a Database Table. Useful for registering pre-existing PG tables in UC. + See CreateSyncedDatabaseTable for creating synced tables in PG from a source + table in UC. Arguments: NAME: Full three-part (catalog, schema, table) name of the table.` @@ -500,6 +588,66 @@ func newDeleteDatabaseInstance() *cobra.Command { return cmd } +// start delete-database-instance-role command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteDatabaseInstanceRoleOverrides []func( + *cobra.Command, + *database.DeleteDatabaseInstanceRoleRequest, +) + +func newDeleteDatabaseInstanceRole() *cobra.Command { + cmd := &cobra.Command{} + + var deleteDatabaseInstanceRoleReq database.DeleteDatabaseInstanceRoleRequest + + cmd.Flags().BoolVar(&deleteDatabaseInstanceRoleReq.AllowMissing, "allow-missing", deleteDatabaseInstanceRoleReq.AllowMissing, `This is the AIP standard name for the equivalent of Postgres' IF EXISTS option.`) + cmd.Flags().StringVar(&deleteDatabaseInstanceRoleReq.ReassignOwnedTo, "reassign-owned-to", deleteDatabaseInstanceRoleReq.ReassignOwnedTo, ``) + + cmd.Use = "delete-database-instance-role INSTANCE_NAME NAME" + cmd.Short = `Delete a role for a Database Instance.` + cmd.Long = `Delete a role for a Database Instance. + + Deletes a role for a Database Instance.` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + deleteDatabaseInstanceRoleReq.InstanceName = args[0] + deleteDatabaseInstanceRoleReq.Name = args[1] + + err = w.Database.DeleteDatabaseInstanceRole(ctx, deleteDatabaseInstanceRoleReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteDatabaseInstanceRoleOverrides { + fn(cmd, &deleteDatabaseInstanceRoleReq) + } + + return cmd +} + // start delete-database-table command // Slice with functions to override default command behavior. @@ -670,6 +818,7 @@ func newGenerateDatabaseCredential() *cobra.Command { cmd.Flags().Var(&generateDatabaseCredentialJson, "json", `either inline JSON string or @path/to/file.json with request body`) + // TODO: array: claims // TODO: array: instance_names cmd.Flags().StringVar(&generateDatabaseCredentialReq.RequestId, "request-id", generateDatabaseCredentialReq.RequestId, ``) @@ -826,6 +975,63 @@ func newGetDatabaseInstance() *cobra.Command { return cmd } +// start get-database-instance-role command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getDatabaseInstanceRoleOverrides []func( + *cobra.Command, + *database.GetDatabaseInstanceRoleRequest, +) + +func newGetDatabaseInstanceRole() *cobra.Command { + cmd := &cobra.Command{} + + var getDatabaseInstanceRoleReq database.GetDatabaseInstanceRoleRequest + + cmd.Use = "get-database-instance-role INSTANCE_NAME NAME" + cmd.Short = `Get a role for a Database Instance.` + cmd.Long = `Get a role for a Database Instance. + + Gets a role for a Database Instance.` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + getDatabaseInstanceRoleReq.InstanceName = args[0] + getDatabaseInstanceRoleReq.Name = args[1] + + response, err := w.Database.GetDatabaseInstanceRole(ctx, getDatabaseInstanceRoleReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getDatabaseInstanceRoleOverrides { + fn(cmd, &getDatabaseInstanceRoleReq) + } + + return cmd +} + // start get-database-table command // Slice with functions to override default command behavior. @@ -928,6 +1134,62 @@ func newGetSyncedDatabaseTable() *cobra.Command { return cmd } +// start list-database-instance-roles command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listDatabaseInstanceRolesOverrides []func( + *cobra.Command, + *database.ListDatabaseInstanceRolesRequest, +) + +func newListDatabaseInstanceRoles() *cobra.Command { + cmd := &cobra.Command{} + + var listDatabaseInstanceRolesReq database.ListDatabaseInstanceRolesRequest + + cmd.Flags().IntVar(&listDatabaseInstanceRolesReq.PageSize, "page-size", listDatabaseInstanceRolesReq.PageSize, `Upper bound for items returned.`) + cmd.Flags().StringVar(&listDatabaseInstanceRolesReq.PageToken, "page-token", listDatabaseInstanceRolesReq.PageToken, `Pagination token to go to the next page of Database Instances.`) + + cmd.Use = "list-database-instance-roles INSTANCE_NAME" + cmd.Short = `List roles for a Database Instance.` + cmd.Long = `List roles for a Database Instance. + + START OF PG ROLE APIs Section` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + listDatabaseInstanceRolesReq.InstanceName = args[0] + + response := w.Database.ListDatabaseInstanceRoles(ctx, listDatabaseInstanceRolesReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listDatabaseInstanceRolesOverrides { + fn(cmd, &listDatabaseInstanceRolesReq) + } + + return cmd +} + // start list-database-instances command // Slice with functions to override default command behavior. @@ -996,6 +1258,11 @@ func newUpdateDatabaseInstance() *cobra.Command { cmd.Flags().Var(&updateDatabaseInstanceJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&updateDatabaseInstanceReq.DatabaseInstance.Capacity, "capacity", updateDatabaseInstanceReq.DatabaseInstance.Capacity, `The sku of the instance.`) + // TODO: array: child_instance_refs + cmd.Flags().BoolVar(&updateDatabaseInstanceReq.DatabaseInstance.EnableReadableSecondaries, "enable-readable-secondaries", updateDatabaseInstanceReq.DatabaseInstance.EnableReadableSecondaries, `Whether to enable secondaries to serve read-only traffic.`) + cmd.Flags().IntVar(&updateDatabaseInstanceReq.DatabaseInstance.NodeCount, "node-count", updateDatabaseInstanceReq.DatabaseInstance.NodeCount, `The number of nodes in the instance, composed of 1 primary and 0 or more secondaries.`) + // TODO: complex arg: parent_instance_ref + cmd.Flags().IntVar(&updateDatabaseInstanceReq.DatabaseInstance.RetentionWindowInDays, "retention-window-in-days", updateDatabaseInstanceReq.DatabaseInstance.RetentionWindowInDays, `The retention window for the instance.`) cmd.Flags().BoolVar(&updateDatabaseInstanceReq.DatabaseInstance.Stopped, "stopped", updateDatabaseInstanceReq.DatabaseInstance.Stopped, `Whether the instance is stopped.`) cmd.Use = "update-database-instance NAME" diff --git a/cmd/workspace/default-warehouse-id/default-warehouse-id.go b/cmd/workspace/default-warehouse-id/default-warehouse-id.go new file mode 100755 index 0000000000..dec4078fe9 --- /dev/null +++ b/cmd/workspace/default-warehouse-id/default-warehouse-id.go @@ -0,0 +1,215 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package default_warehouse_id + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/settings" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "default-warehouse-id", + Short: `Warehouse to be selected by default for users in this workspace.`, + Long: `Warehouse to be selected by default for users in this workspace. Covers SQL + workloads only and can be overridden by users.`, + + // This service is being previewed; hide from help output. + Hidden: true, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newDelete()) + cmd.AddCommand(newGet()) + cmd.AddCommand(newUpdate()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *settings.DeleteDefaultWarehouseIdRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq settings.DeleteDefaultWarehouseIdRequest + + cmd.Flags().StringVar(&deleteReq.Etag, "etag", deleteReq.Etag, `etag used for versioning.`) + + cmd.Use = "delete" + cmd.Short = `Delete the Default Warehouse Id setting.` + cmd.Long = `Delete the Default Warehouse Id setting. + + Reverts the Default Warehouse Id setting to its default value.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + response, err := w.Settings.DefaultWarehouseId().Delete(ctx, deleteReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *settings.GetDefaultWarehouseIdRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq settings.GetDefaultWarehouseIdRequest + + cmd.Flags().StringVar(&getReq.Etag, "etag", getReq.Etag, `etag used for versioning.`) + + cmd.Use = "get" + cmd.Short = `Get the Default Warehouse Id setting.` + cmd.Long = `Get the Default Warehouse Id setting. + + Gets the Default Warehouse Id setting.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + response, err := w.Settings.DefaultWarehouseId().Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *settings.UpdateDefaultWarehouseIdRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq settings.UpdateDefaultWarehouseIdRequest + var updateJson flags.JsonFlag + + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "update" + cmd.Short = `Update the Default Warehouse Id setting.` + cmd.Long = `Update the Default Warehouse Id setting. 
+ + Updates the Default Warehouse Id setting.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateJson.Unmarshal(&updateReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.Settings.DefaultWarehouseId().Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +// end service DefaultWarehouseId diff --git a/cmd/workspace/external-lineage/external-lineage.go b/cmd/workspace/external-lineage/external-lineage.go new file mode 100755 index 0000000000..1c63e1f2b6 --- /dev/null +++ b/cmd/workspace/external-lineage/external-lineage.go @@ -0,0 +1,320 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package external_lineage + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "external-lineage", + Short: `External Lineage APIs enable defining and managing lineage relationships between Databricks objects and external systems.`, + Long: `External Lineage APIs enable defining and managing lineage relationships + between Databricks objects and external systems. These APIs allow users to + capture data flows connecting Databricks tables, models, and file paths with + external metadata objects. + + With these APIs, users can create, update, delete, and list lineage + relationships with support for column-level mappings and custom properties.`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newCreateExternalLineageRelationship()) + cmd.AddCommand(newDeleteExternalLineageRelationship()) + cmd.AddCommand(newListExternalLineageRelationships()) + cmd.AddCommand(newUpdateExternalLineageRelationship()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create-external-lineage-relationship command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createExternalLineageRelationshipOverrides []func( + *cobra.Command, + *catalog.CreateExternalLineageRelationshipRequest, +) + +func newCreateExternalLineageRelationship() *cobra.Command { + cmd := &cobra.Command{} + + var createExternalLineageRelationshipReq catalog.CreateExternalLineageRelationshipRequest + createExternalLineageRelationshipReq.ExternalLineageRelationship = catalog.CreateRequestExternalLineage{} + var createExternalLineageRelationshipJson flags.JsonFlag + + cmd.Flags().Var(&createExternalLineageRelationshipJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: columns + // TODO: map via StringToStringVar: properties + + cmd.Use = "create-external-lineage-relationship" + cmd.Short = `Create an external lineage relationship.` + cmd.Long = `Create an external lineage relationship. + + Creates an external lineage relationship between a Databricks or external + metadata object and another external metadata object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createExternalLineageRelationshipJson.Unmarshal(&createExternalLineageRelationshipReq.ExternalLineageRelationship) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.ExternalLineage.CreateExternalLineageRelationship(ctx, createExternalLineageRelationshipReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createExternalLineageRelationshipOverrides { + fn(cmd, &createExternalLineageRelationshipReq) + } + + return cmd +} + +// start delete-external-lineage-relationship command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteExternalLineageRelationshipOverrides []func( + *cobra.Command, + *catalog.DeleteExternalLineageRelationshipRequest, +) + +func newDeleteExternalLineageRelationship() *cobra.Command { + cmd := &cobra.Command{} + + var deleteExternalLineageRelationshipReq catalog.DeleteExternalLineageRelationshipRequest + var deleteExternalLineageRelationshipJson flags.JsonFlag + + cmd.Flags().Var(&deleteExternalLineageRelationshipJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "delete-external-lineage-relationship" + cmd.Short = `Delete an external lineage relationship.` + cmd.Long = `Delete an external lineage relationship. 
+ + Deletes an external lineage relationship between a Databricks or external + metadata object and another external metadata object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := deleteExternalLineageRelationshipJson.Unmarshal(&deleteExternalLineageRelationshipReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + err = w.ExternalLineage.DeleteExternalLineageRelationship(ctx, deleteExternalLineageRelationshipReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteExternalLineageRelationshipOverrides { + fn(cmd, &deleteExternalLineageRelationshipReq) + } + + return cmd +} + +// start list-external-lineage-relationships command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listExternalLineageRelationshipsOverrides []func( + *cobra.Command, + *catalog.ListExternalLineageRelationshipsRequest, +) + +func newListExternalLineageRelationships() *cobra.Command { + cmd := &cobra.Command{} + + var listExternalLineageRelationshipsReq catalog.ListExternalLineageRelationshipsRequest + var listExternalLineageRelationshipsJson flags.JsonFlag + + cmd.Flags().Var(&listExternalLineageRelationshipsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().IntVar(&listExternalLineageRelationshipsReq.PageSize, "page-size", listExternalLineageRelationshipsReq.PageSize, ``) + cmd.Flags().StringVar(&listExternalLineageRelationshipsReq.PageToken, "page-token", listExternalLineageRelationshipsReq.PageToken, ``) + + cmd.Use = "list-external-lineage-relationships" + cmd.Short = `List external lineage relationships.` + cmd.Long = `List external lineage relationships. + + Lists external lineage relationships of a Databricks object or external + metadata given a supplied direction.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := listExternalLineageRelationshipsJson.Unmarshal(&listExternalLineageRelationshipsReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response := w.ExternalLineage.ListExternalLineageRelationships(ctx, listExternalLineageRelationshipsReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listExternalLineageRelationshipsOverrides { + fn(cmd, &listExternalLineageRelationshipsReq) + } + + return cmd +} + +// start update-external-lineage-relationship command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateExternalLineageRelationshipOverrides []func( + *cobra.Command, + *catalog.UpdateExternalLineageRelationshipRequest, +) + +func newUpdateExternalLineageRelationship() *cobra.Command { + cmd := &cobra.Command{} + + var updateExternalLineageRelationshipReq catalog.UpdateExternalLineageRelationshipRequest + updateExternalLineageRelationshipReq.ExternalLineageRelationship = catalog.UpdateRequestExternalLineage{} + var updateExternalLineageRelationshipJson flags.JsonFlag + + cmd.Flags().Var(&updateExternalLineageRelationshipJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: columns + // TODO: map via StringToStringVar: properties + + cmd.Use = "update-external-lineage-relationship" + cmd.Short = `Update an external lineage relationship.` + cmd.Long = `Update an external lineage relationship. + + Updates an external lineage relationship between a Databricks or external + metadata object and another external metadata object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateExternalLineageRelationshipJson.Unmarshal(&updateExternalLineageRelationshipReq.ExternalLineageRelationship) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.ExternalLineage.UpdateExternalLineageRelationship(ctx, updateExternalLineageRelationshipReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateExternalLineageRelationshipOverrides { + fn(cmd, &updateExternalLineageRelationshipReq) + } + + return cmd +} + +// end service ExternalLineage diff --git a/cmd/workspace/external-locations/external-locations.go b/cmd/workspace/external-locations/external-locations.go index eb04fcab87..a0db1aeaa4 100755 --- a/cmd/workspace/external-locations/external-locations.go +++ b/cmd/workspace/external-locations/external-locations.go @@ -74,7 +74,7 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) - cmd.Flags().BoolVar(&createReq.EnableFileEvents, "enable-file-events", createReq.EnableFileEvents, `[Create:OPT Update:OPT] Whether to enable file events on this external location.`) + cmd.Flags().BoolVar(&createReq.EnableFileEvents, "enable-file-events", createReq.EnableFileEvents, `Whether to enable file events on this external location.`) // TODO: complex arg: encryption_details cmd.Flags().BoolVar(&createReq.Fallback, "fallback", createReq.Fallback, `Indicates whether fallback mode is enabled for this external location.`) // TODO: complex arg: file_event_queue @@ -347,12 +347,12 @@ func newUpdate() *cobra.Command { cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) cmd.Flags().StringVar(&updateReq.CredentialName, "credential-name", updateReq.CredentialName, `Name of the storage credential used with this location.`) - cmd.Flags().BoolVar(&updateReq.EnableFileEvents, "enable-file-events", updateReq.EnableFileEvents, `[Create:OPT Update:OPT] Whether to enable file events on this external location.`) + cmd.Flags().BoolVar(&updateReq.EnableFileEvents, "enable-file-events", updateReq.EnableFileEvents, `Whether to enable file events on this external location.`) // TODO: complex arg: encryption_details cmd.Flags().BoolVar(&updateReq.Fallback, "fallback", updateReq.Fallback, `Indicates whether fallback mode is enabled for this external location.`) // TODO: complex arg: file_event_queue cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if changing url invalidates dependent external tables or mounts.`) - cmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `. Supported values: [ISOLATION_MODE_ISOLATED, ISOLATION_MODE_OPEN]`) + cmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Supported values: [ISOLATION_MODE_ISOLATED, ISOLATION_MODE_OPEN]`) cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the external location.`) cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the external location.`) cmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Indicates whether the external location is read-only.`) diff --git a/cmd/workspace/external-metadata/external-metadata.go b/cmd/workspace/external-metadata/external-metadata.go new file mode 100755 index 0000000000..85681a3735 --- /dev/null +++ b/cmd/workspace/external-metadata/external-metadata.go @@ -0,0 +1,466 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package external_metadata + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "external-metadata", + Short: `External Metadata objects enable customers to register and manage metadata about external systems within Unity Catalog.`, + Long: `External Metadata objects enable customers to register and manage metadata + about external systems within Unity Catalog. + + These APIs provide a standardized way to create, update, retrieve, list, and + delete external metadata objects. Fine-grained authorization ensures that only + users with appropriate permissions can view and manage external metadata + objects.`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newCreateExternalMetadata()) + cmd.AddCommand(newDeleteExternalMetadata()) + cmd.AddCommand(newGetExternalMetadata()) + cmd.AddCommand(newListExternalMetadata()) + cmd.AddCommand(newUpdateExternalMetadata()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create-external-metadata command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createExternalMetadataOverrides []func( + *cobra.Command, + *catalog.CreateExternalMetadataRequest, +) + +func newCreateExternalMetadata() *cobra.Command { + cmd := &cobra.Command{} + + var createExternalMetadataReq catalog.CreateExternalMetadataRequest + createExternalMetadataReq.ExternalMetadata = catalog.ExternalMetadata{} + var createExternalMetadataJson flags.JsonFlag + + cmd.Flags().Var(&createExternalMetadataJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: columns + cmd.Flags().StringVar(&createExternalMetadataReq.ExternalMetadata.Description, "description", createExternalMetadataReq.ExternalMetadata.Description, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createExternalMetadataReq.ExternalMetadata.Owner, "owner", createExternalMetadataReq.ExternalMetadata.Owner, `Owner of the external metadata object.`) + // TODO: map via StringToStringVar: properties + cmd.Flags().StringVar(&createExternalMetadataReq.ExternalMetadata.Url, "url", createExternalMetadataReq.ExternalMetadata.Url, `URL associated with the external metadata object.`) + + cmd.Use = "create-external-metadata NAME SYSTEM_TYPE ENTITY_TYPE" + cmd.Short = `Create an external metadata object.` + cmd.Long = `Create an external metadata object. + + Creates a new external metadata object in the parent metastore if the caller + is a metastore admin or has the **CREATE_EXTERNAL_METADATA** privilege. Grants + **BROWSE** to all account users upon creation by default. + + Arguments: + NAME: Name of the external metadata object. + SYSTEM_TYPE: Type of external system. 
+ Supported values: [ + AMAZON_REDSHIFT, + AZURE_SYNAPSE, + CONFLUENT, + DATABRICKS, + GOOGLE_BIGQUERY, + KAFKA, + LOOKER, + MICROSOFT_FABRIC, + MICROSOFT_SQL_SERVER, + MONGODB, + MYSQL, + ORACLE, + OTHER, + POSTGRESQL, + POWER_BI, + SALESFORCE, + SAP, + SERVICENOW, + SNOWFLAKE, + TABLEAU, + TERADATA, + WORKDAY, + ] + ENTITY_TYPE: Type of entity within the external system.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(0)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'name', 'system_type', 'entity_type' in your JSON input") + } + return nil + } + check := root.ExactArgs(3) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createExternalMetadataJson.Unmarshal(&createExternalMetadataReq.ExternalMetadata) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + if !cmd.Flags().Changed("json") { + createExternalMetadataReq.ExternalMetadata.Name = args[0] + } + if !cmd.Flags().Changed("json") { + _, err = fmt.Sscan(args[1], &createExternalMetadataReq.ExternalMetadata.SystemType) + if err != nil { + return fmt.Errorf("invalid SYSTEM_TYPE: %s", args[1]) + } + } + if !cmd.Flags().Changed("json") { + createExternalMetadataReq.ExternalMetadata.EntityType = args[2] + } + + response, err := w.ExternalMetadata.CreateExternalMetadata(ctx, createExternalMetadataReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createExternalMetadataOverrides { + fn(cmd, &createExternalMetadataReq) + } + + return cmd +} + +// start delete-external-metadata command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteExternalMetadataOverrides []func( + *cobra.Command, + *catalog.DeleteExternalMetadataRequest, +) + +func newDeleteExternalMetadata() *cobra.Command { + cmd := &cobra.Command{} + + var deleteExternalMetadataReq catalog.DeleteExternalMetadataRequest + + cmd.Use = "delete-external-metadata NAME" + cmd.Short = `Delete an external metadata object.` + cmd.Long = `Delete an external metadata object. + + Deletes the external metadata object that matches the supplied name. 
The + caller must be a metastore admin, the owner of the external metadata object, + or a user that has the **MANAGE** privilege.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + deleteExternalMetadataReq.Name = args[0] + + err = w.ExternalMetadata.DeleteExternalMetadata(ctx, deleteExternalMetadataReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteExternalMetadataOverrides { + fn(cmd, &deleteExternalMetadataReq) + } + + return cmd +} + +// start get-external-metadata command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getExternalMetadataOverrides []func( + *cobra.Command, + *catalog.GetExternalMetadataRequest, +) + +func newGetExternalMetadata() *cobra.Command { + cmd := &cobra.Command{} + + var getExternalMetadataReq catalog.GetExternalMetadataRequest + + cmd.Use = "get-external-metadata NAME" + cmd.Short = `Get an external metadata object.` + cmd.Long = `Get an external metadata object. + + Gets the specified external metadata object in a metastore. The caller must be + a metastore admin, the owner of the external metadata object, or a user that + has the **BROWSE** privilege.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + getExternalMetadataReq.Name = args[0] + + response, err := w.ExternalMetadata.GetExternalMetadata(ctx, getExternalMetadataReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getExternalMetadataOverrides { + fn(cmd, &getExternalMetadataReq) + } + + return cmd +} + +// start list-external-metadata command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listExternalMetadataOverrides []func( + *cobra.Command, + *catalog.ListExternalMetadataRequest, +) + +func newListExternalMetadata() *cobra.Command { + cmd := &cobra.Command{} + + var listExternalMetadataReq catalog.ListExternalMetadataRequest + + cmd.Flags().IntVar(&listExternalMetadataReq.PageSize, "page-size", listExternalMetadataReq.PageSize, ``) + cmd.Flags().StringVar(&listExternalMetadataReq.PageToken, "page-token", listExternalMetadataReq.PageToken, ``) + + cmd.Use = "list-external-metadata" + cmd.Short = `List external metadata objects.` + cmd.Long = `List external metadata objects. + + Gets an array of external metadata objects in the metastore. 
If the caller is + the metastore admin, all external metadata objects will be retrieved. + Otherwise, only external metadata objects that the caller has **BROWSE** on + will be retrieved. There is no guarantee of a specific ordering of the + elements in the array.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + response := w.ExternalMetadata.ListExternalMetadata(ctx, listExternalMetadataReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listExternalMetadataOverrides { + fn(cmd, &listExternalMetadataReq) + } + + return cmd +} + +// start update-external-metadata command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateExternalMetadataOverrides []func( + *cobra.Command, + *catalog.UpdateExternalMetadataRequest, +) + +func newUpdateExternalMetadata() *cobra.Command { + cmd := &cobra.Command{} + + var updateExternalMetadataReq catalog.UpdateExternalMetadataRequest + updateExternalMetadataReq.ExternalMetadata = catalog.ExternalMetadata{} + var updateExternalMetadataJson flags.JsonFlag + + cmd.Flags().Var(&updateExternalMetadataJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: columns + cmd.Flags().StringVar(&updateExternalMetadataReq.ExternalMetadata.Description, "description", updateExternalMetadataReq.ExternalMetadata.Description, `User-provided free-form text description.`) + cmd.Flags().StringVar(&updateExternalMetadataReq.ExternalMetadata.Owner, "owner", updateExternalMetadataReq.ExternalMetadata.Owner, `Owner of the external metadata object.`) + // TODO: map via StringToStringVar: properties + cmd.Flags().StringVar(&updateExternalMetadataReq.ExternalMetadata.Url, "url", updateExternalMetadataReq.ExternalMetadata.Url, `URL associated with the external metadata object.`) + + cmd.Use = "update-external-metadata NAME SYSTEM_TYPE ENTITY_TYPE" + cmd.Short = `Update an external metadata object.` + cmd.Long = `Update an external metadata object. + + Updates the external metadata object that matches the supplied name. The + caller can only update either the owner or other metadata fields in one + request. The caller must be a metastore admin, the owner of the external + metadata object, or a user that has the **MODIFY** privilege. If the caller is + updating the owner, they must also have the **MANAGE** privilege. + + Arguments: + NAME: Name of the external metadata object. + SYSTEM_TYPE: Type of external system. 
+ Supported values: [ + AMAZON_REDSHIFT, + AZURE_SYNAPSE, + CONFLUENT, + DATABRICKS, + GOOGLE_BIGQUERY, + KAFKA, + LOOKER, + MICROSOFT_FABRIC, + MICROSOFT_SQL_SERVER, + MONGODB, + MYSQL, + ORACLE, + OTHER, + POSTGRESQL, + POWER_BI, + SALESFORCE, + SAP, + SERVICENOW, + SNOWFLAKE, + TABLEAU, + TERADATA, + WORKDAY, + ] + ENTITY_TYPE: Type of entity within the external system.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(1)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, provide only NAME as positional arguments. Provide 'name', 'system_type', 'entity_type' in your JSON input") + } + return nil + } + check := root.ExactArgs(3) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateExternalMetadataJson.Unmarshal(&updateExternalMetadataReq.ExternalMetadata) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + updateExternalMetadataReq.Name = args[0] + if !cmd.Flags().Changed("json") { + _, err = fmt.Sscan(args[1], &updateExternalMetadataReq.ExternalMetadata.SystemType) + if err != nil { + return fmt.Errorf("invalid SYSTEM_TYPE: %s", args[1]) + } + } + if !cmd.Flags().Changed("json") { + updateExternalMetadataReq.ExternalMetadata.EntityType = args[2] + } + + response, err := w.ExternalMetadata.UpdateExternalMetadata(ctx, updateExternalMetadataReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateExternalMetadataOverrides { + fn(cmd, &updateExternalMetadataReq) + } + + return cmd +} + +// end service ExternalMetadata diff --git a/cmd/workspace/feature-store/feature-store.go b/cmd/workspace/feature-store/feature-store.go index 926225680a..0c16c76d45 100755 --- a/cmd/workspace/feature-store/feature-store.go +++ b/cmd/workspace/feature-store/feature-store.go @@ -72,15 +72,17 @@ func newCreateOnlineStore() *cobra.Command { cmd.Flags().Var(&createOnlineStoreJson, "json", `either inline JSON string or @path/to/file.json with request body`) - cmd.Flags().StringVar(&createOnlineStoreReq.OnlineStore.Capacity, "capacity", createOnlineStoreReq.OnlineStore.Capacity, `The capacity of the online store.`) + cmd.Flags().IntVar(&createOnlineStoreReq.OnlineStore.ReadReplicaCount, "read-replica-count", createOnlineStoreReq.OnlineStore.ReadReplicaCount, `The number of read replicas for the online store.`) - cmd.Use = "create-online-store NAME" + cmd.Use = "create-online-store NAME CAPACITY" cmd.Short = `Create an Online Feature Store.` cmd.Long = `Create an Online Feature Store. Arguments: NAME: The name of the online store. This is the unique identifier for the online - store.` + store. + CAPACITY: The capacity of the online store. 
Valid values are "CU_1", "CU_2", "CU_4", + "CU_8".` cmd.Annotations = make(map[string]string) @@ -88,11 +90,11 @@ func newCreateOnlineStore() *cobra.Command { if cmd.Flags().Changed("json") { err := root.ExactArgs(0)(cmd, args) if err != nil { - return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'name' in your JSON input") + return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'name', 'capacity' in your JSON input") } return nil } - check := root.ExactArgs(1) + check := root.ExactArgs(2) return check(cmd, args) } @@ -116,6 +118,9 @@ func newCreateOnlineStore() *cobra.Command { if !cmd.Flags().Changed("json") { createOnlineStoreReq.OnlineStore.Name = args[0] } + if !cmd.Flags().Changed("json") { + createOnlineStoreReq.OnlineStore.Capacity = args[1] + } response, err := w.FeatureStore.CreateOnlineStore(ctx, createOnlineStoreReq) if err != nil { @@ -382,20 +387,29 @@ func newUpdateOnlineStore() *cobra.Command { cmd.Flags().Var(&updateOnlineStoreJson, "json", `either inline JSON string or @path/to/file.json with request body`) - cmd.Flags().StringVar(&updateOnlineStoreReq.OnlineStore.Capacity, "capacity", updateOnlineStoreReq.OnlineStore.Capacity, `The capacity of the online store.`) + cmd.Flags().IntVar(&updateOnlineStoreReq.OnlineStore.ReadReplicaCount, "read-replica-count", updateOnlineStoreReq.OnlineStore.ReadReplicaCount, `The number of read replicas for the online store.`) - cmd.Use = "update-online-store NAME" + cmd.Use = "update-online-store NAME CAPACITY" cmd.Short = `Update an Online Feature Store.` cmd.Long = `Update an Online Feature Store. Arguments: NAME: The name of the online store. This is the unique identifier for the online - store.` + store. + CAPACITY: The capacity of the online store. Valid values are "CU_1", "CU_2", "CU_4", + "CU_8".` cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { - check := root.ExactArgs(1) + if cmd.Flags().Changed("json") { + err := root.ExactArgs(1)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, provide only NAME as positional arguments. Provide 'name', 'capacity' in your JSON input") + } + return nil + } + check := root.ExactArgs(2) return check(cmd, args) } @@ -417,6 +431,9 @@ func newUpdateOnlineStore() *cobra.Command { } } updateOnlineStoreReq.Name = args[0] + if !cmd.Flags().Changed("json") { + updateOnlineStoreReq.OnlineStore.Capacity = args[1] + } response, err := w.FeatureStore.UpdateOnlineStore(ctx, updateOnlineStoreReq) if err != nil { diff --git a/cmd/workspace/genie/genie.go b/cmd/workspace/genie/genie.go index 9958925f88..70d15a014a 100755 --- a/cmd/workspace/genie/genie.go +++ b/cmd/workspace/genie/genie.go @@ -36,17 +36,18 @@ func New() *cobra.Command { // Add methods cmd.AddCommand(newCreateMessage()) + cmd.AddCommand(newDeleteConversation()) cmd.AddCommand(newExecuteMessageAttachmentQuery()) cmd.AddCommand(newExecuteMessageQuery()) - cmd.AddCommand(newGenerateDownloadFullQueryResult()) - cmd.AddCommand(newGetDownloadFullQueryResult()) cmd.AddCommand(newGetMessage()) cmd.AddCommand(newGetMessageAttachmentQueryResult()) cmd.AddCommand(newGetMessageQueryResult()) cmd.AddCommand(newGetMessageQueryResultByAttachment()) cmd.AddCommand(newGetSpace()) + cmd.AddCommand(newListConversations()) cmd.AddCommand(newListSpaces()) cmd.AddCommand(newStartConversation()) + cmd.AddCommand(newTrashSpace()) // Apply optional overrides to this command. 
for _, fn := range cmdOverrides { @@ -161,101 +162,34 @@ func newCreateMessage() *cobra.Command { return cmd } -// start execute-message-attachment-query command - -// Slice with functions to override default command behavior. -// Functions can be added from the `init()` function in manually curated files in this directory. -var executeMessageAttachmentQueryOverrides []func( - *cobra.Command, - *dashboards.GenieExecuteMessageAttachmentQueryRequest, -) - -func newExecuteMessageAttachmentQuery() *cobra.Command { - cmd := &cobra.Command{} - - var executeMessageAttachmentQueryReq dashboards.GenieExecuteMessageAttachmentQueryRequest - - cmd.Use = "execute-message-attachment-query SPACE_ID CONVERSATION_ID MESSAGE_ID ATTACHMENT_ID" - cmd.Short = `Execute message attachment SQL query.` - cmd.Long = `Execute message attachment SQL query. - - Execute the SQL for a message query attachment. Use this API when the query - attachment has expired and needs to be re-executed. - - Arguments: - SPACE_ID: Genie space ID - CONVERSATION_ID: Conversation ID - MESSAGE_ID: Message ID - ATTACHMENT_ID: Attachment ID` - - cmd.Annotations = make(map[string]string) - - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := root.ExactArgs(4) - return check(cmd, args) - } - - cmd.PreRunE = root.MustWorkspaceClient - cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { - ctx := cmd.Context() - w := cmdctx.WorkspaceClient(ctx) - - executeMessageAttachmentQueryReq.SpaceId = args[0] - executeMessageAttachmentQueryReq.ConversationId = args[1] - executeMessageAttachmentQueryReq.MessageId = args[2] - executeMessageAttachmentQueryReq.AttachmentId = args[3] - - response, err := w.Genie.ExecuteMessageAttachmentQuery(ctx, executeMessageAttachmentQueryReq) - if err != nil { - return err - } - return cmdio.Render(ctx, response) - } - - // Disable completions since they are not applicable. - // Can be overridden by manual implementation in `override.go`. - cmd.ValidArgsFunction = cobra.NoFileCompletions - - // Apply optional overrides to this command. - for _, fn := range executeMessageAttachmentQueryOverrides { - fn(cmd, &executeMessageAttachmentQueryReq) - } - - return cmd -} - -// start execute-message-query command +// start delete-conversation command // Slice with functions to override default command behavior. // Functions can be added from the `init()` function in manually curated files in this directory. -var executeMessageQueryOverrides []func( +var deleteConversationOverrides []func( *cobra.Command, - *dashboards.GenieExecuteMessageQueryRequest, + *dashboards.GenieDeleteConversationRequest, ) -func newExecuteMessageQuery() *cobra.Command { +func newDeleteConversation() *cobra.Command { cmd := &cobra.Command{} - var executeMessageQueryReq dashboards.GenieExecuteMessageQueryRequest + var deleteConversationReq dashboards.GenieDeleteConversationRequest - cmd.Use = "execute-message-query SPACE_ID CONVERSATION_ID MESSAGE_ID" - cmd.Short = `[Deprecated] Execute SQL query in a conversation message.` - cmd.Long = `[Deprecated] Execute SQL query in a conversation message. + cmd.Use = "delete-conversation SPACE_ID CONVERSATION_ID" + cmd.Short = `Delete conversation.` + cmd.Long = `Delete conversation. - Execute the SQL query in the message. + Delete a conversation. Arguments: - SPACE_ID: Genie space ID - CONVERSATION_ID: Conversation ID - MESSAGE_ID: Message ID` - - // This command is being previewed; hide from help output. 
- cmd.Hidden = true + SPACE_ID: The ID associated with the Genie space where the conversation is located. + CONVERSATION_ID: The ID of the conversation to delete.` cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { - check := root.ExactArgs(3) + check := root.ExactArgs(2) return check(cmd, args) } @@ -264,15 +198,14 @@ func newExecuteMessageQuery() *cobra.Command { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - executeMessageQueryReq.SpaceId = args[0] - executeMessageQueryReq.ConversationId = args[1] - executeMessageQueryReq.MessageId = args[2] + deleteConversationReq.SpaceId = args[0] + deleteConversationReq.ConversationId = args[1] - response, err := w.Genie.ExecuteMessageQuery(ctx, executeMessageQueryReq) + err = w.Genie.DeleteConversation(ctx, deleteConversationReq) if err != nil { return err } - return cmdio.Render(ctx, response) + return nil } // Disable completions since they are not applicable. @@ -280,38 +213,33 @@ func newExecuteMessageQuery() *cobra.Command { cmd.ValidArgsFunction = cobra.NoFileCompletions // Apply optional overrides to this command. - for _, fn := range executeMessageQueryOverrides { - fn(cmd, &executeMessageQueryReq) + for _, fn := range deleteConversationOverrides { + fn(cmd, &deleteConversationReq) } return cmd } -// start generate-download-full-query-result command +// start execute-message-attachment-query command // Slice with functions to override default command behavior. // Functions can be added from the `init()` function in manually curated files in this directory. -var generateDownloadFullQueryResultOverrides []func( +var executeMessageAttachmentQueryOverrides []func( *cobra.Command, - *dashboards.GenieGenerateDownloadFullQueryResultRequest, + *dashboards.GenieExecuteMessageAttachmentQueryRequest, ) -func newGenerateDownloadFullQueryResult() *cobra.Command { +func newExecuteMessageAttachmentQuery() *cobra.Command { cmd := &cobra.Command{} - var generateDownloadFullQueryResultReq dashboards.GenieGenerateDownloadFullQueryResultRequest + var executeMessageAttachmentQueryReq dashboards.GenieExecuteMessageAttachmentQueryRequest - cmd.Use = "generate-download-full-query-result SPACE_ID CONVERSATION_ID MESSAGE_ID ATTACHMENT_ID" - cmd.Short = `Generate full query result download.` - cmd.Long = `Generate full query result download. + cmd.Use = "execute-message-attachment-query SPACE_ID CONVERSATION_ID MESSAGE_ID ATTACHMENT_ID" + cmd.Short = `Execute message attachment SQL query.` + cmd.Long = `Execute message attachment SQL query. - Initiates a new SQL execution and returns a download_id that you can use to - track the progress of the download. The query result is stored in an external - link and can be retrieved using the [Get Download Full Query - Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks - strongly recommends that you protect the URLs that are returned by the - EXTERNAL_LINKS disposition. See [Execute - Statement](:method:statementexecution/executestatement) for more details. + Execute the SQL for a message query attachment. Use this API when the query + attachment has expired and needs to be re-executed. Arguments: SPACE_ID: Genie space ID @@ -319,9 +247,6 @@ func newGenerateDownloadFullQueryResult() *cobra.Command { MESSAGE_ID: Message ID ATTACHMENT_ID: Attachment ID` - // This command is being previewed; hide from help output. 
- cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -334,12 +259,12 @@ func newGenerateDownloadFullQueryResult() *cobra.Command { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - generateDownloadFullQueryResultReq.SpaceId = args[0] - generateDownloadFullQueryResultReq.ConversationId = args[1] - generateDownloadFullQueryResultReq.MessageId = args[2] - generateDownloadFullQueryResultReq.AttachmentId = args[3] + executeMessageAttachmentQueryReq.SpaceId = args[0] + executeMessageAttachmentQueryReq.ConversationId = args[1] + executeMessageAttachmentQueryReq.MessageId = args[2] + executeMessageAttachmentQueryReq.AttachmentId = args[3] - response, err := w.Genie.GenerateDownloadFullQueryResult(ctx, generateDownloadFullQueryResultReq) + response, err := w.Genie.ExecuteMessageAttachmentQuery(ctx, executeMessageAttachmentQueryReq) if err != nil { return err } @@ -351,49 +276,37 @@ func newGenerateDownloadFullQueryResult() *cobra.Command { cmd.ValidArgsFunction = cobra.NoFileCompletions // Apply optional overrides to this command. - for _, fn := range generateDownloadFullQueryResultOverrides { - fn(cmd, &generateDownloadFullQueryResultReq) + for _, fn := range executeMessageAttachmentQueryOverrides { + fn(cmd, &executeMessageAttachmentQueryReq) } return cmd } -// start get-download-full-query-result command +// start execute-message-query command // Slice with functions to override default command behavior. // Functions can be added from the `init()` function in manually curated files in this directory. -var getDownloadFullQueryResultOverrides []func( +var executeMessageQueryOverrides []func( *cobra.Command, - *dashboards.GenieGetDownloadFullQueryResultRequest, + *dashboards.GenieExecuteMessageQueryRequest, ) -func newGetDownloadFullQueryResult() *cobra.Command { +func newExecuteMessageQuery() *cobra.Command { cmd := &cobra.Command{} - var getDownloadFullQueryResultReq dashboards.GenieGetDownloadFullQueryResultRequest + var executeMessageQueryReq dashboards.GenieExecuteMessageQueryRequest - cmd.Use = "get-download-full-query-result SPACE_ID CONVERSATION_ID MESSAGE_ID ATTACHMENT_ID DOWNLOAD_ID" - cmd.Short = `Get download full query result.` - cmd.Long = `Get download full query result. + cmd.Use = "execute-message-query SPACE_ID CONVERSATION_ID MESSAGE_ID" + cmd.Short = `[Deprecated] Execute SQL query in a conversation message.` + cmd.Long = `[Deprecated] Execute SQL query in a conversation message. - After [Generating a Full Query Result - Download](:method:genie/getdownloadfullqueryresult) and successfully receiving - a download_id, use this API to poll the download progress. When the download - is complete, the API returns one or more external links to the query result - files. Warning: Databricks strongly recommends that you protect the URLs that - are returned by the EXTERNAL_LINKS disposition. You must not set an - Authorization header in download requests. When using the EXTERNAL_LINKS - disposition, Databricks returns presigned URLs that grant temporary access to - data. See [Execute Statement](:method:statementexecution/executestatement) for - more details. + Execute the SQL query in the message. Arguments: SPACE_ID: Genie space ID CONVERSATION_ID: Conversation ID - MESSAGE_ID: Message ID - ATTACHMENT_ID: Attachment ID - DOWNLOAD_ID: Download ID. 
This ID is provided by the [Generate Download - endpoint](:method:genie/generateDownloadFullQueryResult)` + MESSAGE_ID: Message ID` // This command is being previewed; hide from help output. cmd.Hidden = true @@ -401,7 +314,7 @@ func newGetDownloadFullQueryResult() *cobra.Command { cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { - check := root.ExactArgs(5) + check := root.ExactArgs(3) return check(cmd, args) } @@ -410,13 +323,11 @@ func newGetDownloadFullQueryResult() *cobra.Command { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - getDownloadFullQueryResultReq.SpaceId = args[0] - getDownloadFullQueryResultReq.ConversationId = args[1] - getDownloadFullQueryResultReq.MessageId = args[2] - getDownloadFullQueryResultReq.AttachmentId = args[3] - getDownloadFullQueryResultReq.DownloadId = args[4] + executeMessageQueryReq.SpaceId = args[0] + executeMessageQueryReq.ConversationId = args[1] + executeMessageQueryReq.MessageId = args[2] - response, err := w.Genie.GetDownloadFullQueryResult(ctx, getDownloadFullQueryResultReq) + response, err := w.Genie.ExecuteMessageQuery(ctx, executeMessageQueryReq) if err != nil { return err } @@ -428,8 +339,8 @@ func newGetDownloadFullQueryResult() *cobra.Command { cmd.ValidArgsFunction = cobra.NoFileCompletions // Apply optional overrides to this command. - for _, fn := range getDownloadFullQueryResultOverrides { - fn(cmd, &getDownloadFullQueryResultReq) + for _, fn := range executeMessageQueryOverrides { + fn(cmd, &executeMessageQueryReq) } return cmd @@ -749,6 +660,65 @@ func newGetSpace() *cobra.Command { return cmd } +// start list-conversations command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listConversationsOverrides []func( + *cobra.Command, + *dashboards.GenieListConversationsRequest, +) + +func newListConversations() *cobra.Command { + cmd := &cobra.Command{} + + var listConversationsReq dashboards.GenieListConversationsRequest + + cmd.Flags().IntVar(&listConversationsReq.PageSize, "page-size", listConversationsReq.PageSize, `Maximum number of conversations to return per page.`) + cmd.Flags().StringVar(&listConversationsReq.PageToken, "page-token", listConversationsReq.PageToken, `Token to get the next page of results.`) + + cmd.Use = "list-conversations SPACE_ID" + cmd.Short = `List conversations in a Genie Space.` + cmd.Long = `List conversations in a Genie Space. + + Get a list of conversations in a Genie Space. + + Arguments: + SPACE_ID: The ID of the Genie space to retrieve conversations from.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + listConversationsReq.SpaceId = args[0] + + response, err := w.Genie.ListConversations(ctx, listConversationsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listConversationsOverrides { + fn(cmd, &listConversationsReq) + } + + return cmd +} + // start list-spaces command // Slice with functions to override default command behavior. @@ -772,9 +742,6 @@ func newListSpaces() *cobra.Command { Get list of Genie Spaces.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -908,4 +875,60 @@ func newStartConversation() *cobra.Command { return cmd } +// start trash-space command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var trashSpaceOverrides []func( + *cobra.Command, + *dashboards.GenieTrashSpaceRequest, +) + +func newTrashSpace() *cobra.Command { + cmd := &cobra.Command{} + + var trashSpaceReq dashboards.GenieTrashSpaceRequest + + cmd.Use = "trash-space SPACE_ID" + cmd.Short = `Trash Genie Space.` + cmd.Long = `Trash Genie Space. + + Move a Genie Space to the trash. + + Arguments: + SPACE_ID: The ID associated with the Genie space to be sent to the trash.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + trashSpaceReq.SpaceId = args[0] + + err = w.Genie.TrashSpace(ctx, trashSpaceReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range trashSpaceOverrides { + fn(cmd, &trashSpaceReq) + } + + return cmd +} + // end service Genie diff --git a/cmd/workspace/git-credentials/git-credentials.go b/cmd/workspace/git-credentials/git-credentials.go index 61bcca0417..16977d7643 100755 --- a/cmd/workspace/git-credentials/git-credentials.go +++ b/cmd/workspace/git-credentials/git-credentials.go @@ -67,6 +67,8 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&createReq.GitUsername, "git-username", createReq.GitUsername, `The username or email provided with your Git provider account, depending on which provider you are using.`) + cmd.Flags().BoolVar(&createReq.IsDefaultForProvider, "is-default-for-provider", createReq.IsDefaultForProvider, `if the credential is the default for the given provider.`) + cmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `the name of the git credential, used for identification and ease of lookup.`) cmd.Flags().StringVar(&createReq.PersonalAccessToken, "personal-access-token", createReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`) cmd.Use = "create GIT_PROVIDER" @@ -338,6 +340,8 @@ func newUpdate() *cobra.Command { cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&updateReq.GitUsername, "git-username", updateReq.GitUsername, `The username or email provided with your Git provider account, depending on which provider you are using.`) + cmd.Flags().BoolVar(&updateReq.IsDefaultForProvider, "is-default-for-provider", updateReq.IsDefaultForProvider, `if the credential is the default for the given provider.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `the name of the git credential, used for identification and ease of lookup.`) cmd.Flags().StringVar(&updateReq.PersonalAccessToken, "personal-access-token", updateReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`) cmd.Use = "update CREDENTIAL_ID GIT_PROVIDER" diff --git a/cmd/workspace/groups.go b/cmd/workspace/groups.go index 817f915345..e7f2983dbe 100644 --- a/cmd/workspace/groups.go +++ b/cmd/workspace/groups.go @@ -88,5 +88,9 @@ func Groups() []cobra.Group { ID: "qualitymonitorv2", Title: "Quality Monitor v2", }, + { + ID: "oauth2", + Title: "OAuth", + }, } } diff --git a/cmd/workspace/ip-access-lists/ip-access-lists.go b/cmd/workspace/ip-access-lists/ip-access-lists.go index 7ac0fe80d6..c1646a4cb3 100755 --- a/cmd/workspace/ip-access-lists/ip-access-lists.go +++ b/cmd/workspace/ip-access-lists/ip-access-lists.go @@ -107,12 +107,7 @@ func newCreate() *cobra.Command { Arguments: LABEL: Label for the IP access list. This **cannot** be empty. - LIST_TYPE: Type of IP access list. Valid values are as follows and are - case-sensitive: - - * ALLOW: An allow list. Include this IP or range. * BLOCK: A block - list. Exclude this IP or range. IP addresses in the block list are - excluded even if they are included in an allow list. + LIST_TYPE: Supported values: [ALLOW, BLOCK]` cmd.Annotations = make(map[string]string) @@ -389,12 +384,7 @@ func newReplace() *cobra.Command { Arguments: IP_ACCESS_LIST_ID: The ID for the corresponding IP access list LABEL: Label for the IP access list. This **cannot** be empty. - LIST_TYPE: Type of IP access list. 
Valid values are as follows and are - case-sensitive: - - * ALLOW: An allow list. Include this IP or range. * BLOCK: A block - list. Exclude this IP or range. IP addresses in the block list are - excluded even if they are included in an allow list. + LIST_TYPE: Supported values: [ALLOW, BLOCK] ENABLED: Specifies whether this IP access list is enabled.` @@ -485,7 +475,7 @@ func newUpdate() *cobra.Command { cmd.Flags().BoolVar(&updateReq.Enabled, "enabled", updateReq.Enabled, `Specifies whether this IP access list is enabled.`) // TODO: array: ip_addresses cmd.Flags().StringVar(&updateReq.Label, "label", updateReq.Label, `Label for the IP access list.`) - cmd.Flags().Var(&updateReq.ListType, "list-type", `Type of IP access list. Supported values: [ALLOW, BLOCK]`) + cmd.Flags().Var(&updateReq.ListType, "list-type", `Supported values: [ALLOW, BLOCK]`) cmd.Use = "update IP_ACCESS_LIST_ID" cmd.Short = `Update access list.` diff --git a/cmd/workspace/lakeview-embedded/lakeview-embedded.go b/cmd/workspace/lakeview-embedded/lakeview-embedded.go index ec5b527d0a..ef585adf9e 100755 --- a/cmd/workspace/lakeview-embedded/lakeview-embedded.go +++ b/cmd/workspace/lakeview-embedded/lakeview-embedded.go @@ -59,13 +59,7 @@ func newGetPublishedDashboardTokenInfo() *cobra.Command { cmd.Long = `Read an information of a published dashboard to mint an OAuth token. Get a required authorization details and scopes of a published dashboard to - mint an OAuth token. The authorization_details can be enriched to apply - additional restriction. - - Example: Adding the following authorization_details object to downscope the - viewer permission to specific table { type: "unity_catalog_privileges", - privileges: ["SELECT"], object_type: "TABLE", object_full_path: - "main.default.testdata" } + mint an OAuth token. Arguments: DASHBOARD_ID: UUID identifying the published dashboard.` diff --git a/cmd/workspace/materialized-features/materialized-features.go b/cmd/workspace/materialized-features/materialized-features.go new file mode 100755 index 0000000000..cec625c8e0 --- /dev/null +++ b/cmd/workspace/materialized-features/materialized-features.go @@ -0,0 +1,433 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package materialized_features + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/ml" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "materialized-features", + Short: `Materialized Features are columns in tables and views that can be directly used as features to train and serve ML models.`, + Long: `Materialized Features are columns in tables and views that can be directly + used as features to train and serve ML models.`, + GroupID: "ml", + Annotations: map[string]string{ + "package": "ml", + }, + + // This service is being previewed; hide from help output. 
+ Hidden: true, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newCreateFeatureTag()) + cmd.AddCommand(newDeleteFeatureTag()) + cmd.AddCommand(newGetFeatureLineage()) + cmd.AddCommand(newGetFeatureTag()) + cmd.AddCommand(newListFeatureTags()) + cmd.AddCommand(newUpdateFeatureTag()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create-feature-tag command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createFeatureTagOverrides []func( + *cobra.Command, + *ml.CreateFeatureTagRequest, +) + +func newCreateFeatureTag() *cobra.Command { + cmd := &cobra.Command{} + + var createFeatureTagReq ml.CreateFeatureTagRequest + createFeatureTagReq.FeatureTag = ml.FeatureTag{} + var createFeatureTagJson flags.JsonFlag + + cmd.Flags().Var(&createFeatureTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createFeatureTagReq.FeatureTag.Value, "value", createFeatureTagReq.FeatureTag.Value, ``) + + cmd.Use = "create-feature-tag TABLE_NAME FEATURE_NAME KEY" + cmd.Short = `Create a feature tag.` + cmd.Long = `Create a feature tag. + + Creates a FeatureTag.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(2)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, provide only TABLE_NAME, FEATURE_NAME as positional arguments. Provide 'key' in your JSON input") + } + return nil + } + check := root.ExactArgs(3) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createFeatureTagJson.Unmarshal(&createFeatureTagReq.FeatureTag) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + createFeatureTagReq.TableName = args[0] + createFeatureTagReq.FeatureName = args[1] + if !cmd.Flags().Changed("json") { + createFeatureTagReq.FeatureTag.Key = args[2] + } + + response, err := w.MaterializedFeatures.CreateFeatureTag(ctx, createFeatureTagReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createFeatureTagOverrides { + fn(cmd, &createFeatureTagReq) + } + + return cmd +} + +// start delete-feature-tag command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteFeatureTagOverrides []func( + *cobra.Command, + *ml.DeleteFeatureTagRequest, +) + +func newDeleteFeatureTag() *cobra.Command { + cmd := &cobra.Command{} + + var deleteFeatureTagReq ml.DeleteFeatureTagRequest + + cmd.Use = "delete-feature-tag TABLE_NAME FEATURE_NAME KEY" + cmd.Short = `Delete a feature tag.` + cmd.Long = `Delete a feature tag. + + Deletes a FeatureTag. + + Arguments: + TABLE_NAME: The name of the feature table. 
+ FEATURE_NAME: The name of the feature within the feature table. + KEY: The key of the tag to delete.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(3) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + deleteFeatureTagReq.TableName = args[0] + deleteFeatureTagReq.FeatureName = args[1] + deleteFeatureTagReq.Key = args[2] + + err = w.MaterializedFeatures.DeleteFeatureTag(ctx, deleteFeatureTagReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteFeatureTagOverrides { + fn(cmd, &deleteFeatureTagReq) + } + + return cmd +} + +// start get-feature-lineage command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getFeatureLineageOverrides []func( + *cobra.Command, + *ml.GetFeatureLineageRequest, +) + +func newGetFeatureLineage() *cobra.Command { + cmd := &cobra.Command{} + + var getFeatureLineageReq ml.GetFeatureLineageRequest + + cmd.Use = "get-feature-lineage TABLE_NAME FEATURE_NAME" + cmd.Short = `Get Feature Lineage.` + cmd.Long = `Get Feature Lineage. + + Arguments: + TABLE_NAME: The full name of the feature table in Unity Catalog. + FEATURE_NAME: The name of the feature.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + getFeatureLineageReq.TableName = args[0] + getFeatureLineageReq.FeatureName = args[1] + + response, err := w.MaterializedFeatures.GetFeatureLineage(ctx, getFeatureLineageReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getFeatureLineageOverrides { + fn(cmd, &getFeatureLineageReq) + } + + return cmd +} + +// start get-feature-tag command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getFeatureTagOverrides []func( + *cobra.Command, + *ml.GetFeatureTagRequest, +) + +func newGetFeatureTag() *cobra.Command { + cmd := &cobra.Command{} + + var getFeatureTagReq ml.GetFeatureTagRequest + + cmd.Use = "get-feature-tag TABLE_NAME FEATURE_NAME KEY" + cmd.Short = `Get a feature tag.` + cmd.Long = `Get a feature tag. 
+ + Gets a FeatureTag.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(3) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + getFeatureTagReq.TableName = args[0] + getFeatureTagReq.FeatureName = args[1] + getFeatureTagReq.Key = args[2] + + response, err := w.MaterializedFeatures.GetFeatureTag(ctx, getFeatureTagReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getFeatureTagOverrides { + fn(cmd, &getFeatureTagReq) + } + + return cmd +} + +// start list-feature-tags command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listFeatureTagsOverrides []func( + *cobra.Command, + *ml.ListFeatureTagsRequest, +) + +func newListFeatureTags() *cobra.Command { + cmd := &cobra.Command{} + + var listFeatureTagsReq ml.ListFeatureTagsRequest + + cmd.Flags().IntVar(&listFeatureTagsReq.PageSize, "page-size", listFeatureTagsReq.PageSize, `The maximum number of results to return.`) + cmd.Flags().StringVar(&listFeatureTagsReq.PageToken, "page-token", listFeatureTagsReq.PageToken, `Pagination token to go to the next page based on a previous query.`) + + cmd.Use = "list-feature-tags TABLE_NAME FEATURE_NAME" + cmd.Short = `List all feature tags.` + cmd.Long = `List all feature tags. + + Lists FeatureTags.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + listFeatureTagsReq.TableName = args[0] + listFeatureTagsReq.FeatureName = args[1] + + response := w.MaterializedFeatures.ListFeatureTags(ctx, listFeatureTagsReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listFeatureTagsOverrides { + fn(cmd, &listFeatureTagsReq) + } + + return cmd +} + +// start update-feature-tag command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateFeatureTagOverrides []func( + *cobra.Command, + *ml.UpdateFeatureTagRequest, +) + +func newUpdateFeatureTag() *cobra.Command { + cmd := &cobra.Command{} + + var updateFeatureTagReq ml.UpdateFeatureTagRequest + updateFeatureTagReq.FeatureTag = ml.FeatureTag{} + var updateFeatureTagJson flags.JsonFlag + + cmd.Flags().Var(&updateFeatureTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&updateFeatureTagReq.UpdateMask, "update-mask", updateFeatureTagReq.UpdateMask, `The list of fields to update.`) + cmd.Flags().StringVar(&updateFeatureTagReq.FeatureTag.Value, "value", updateFeatureTagReq.FeatureTag.Value, ``) + + cmd.Use = "update-feature-tag TABLE_NAME FEATURE_NAME KEY" + cmd.Short = `Update a feature tag.` + cmd.Long = `Update a feature tag. + + Updates a FeatureTag.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(3) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateFeatureTagJson.Unmarshal(&updateFeatureTagReq.FeatureTag) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + updateFeatureTagReq.TableName = args[0] + updateFeatureTagReq.FeatureName = args[1] + updateFeatureTagReq.Key = args[2] + + response, err := w.MaterializedFeatures.UpdateFeatureTag(ctx, updateFeatureTagReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateFeatureTagOverrides { + fn(cmd, &updateFeatureTagReq) + } + + return cmd +} + +// end service MaterializedFeatures diff --git a/cmd/workspace/model-registry/model-registry.go b/cmd/workspace/model-registry/model-registry.go index f874f3b2df..6087b50ff8 100755 --- a/cmd/workspace/model-registry/model-registry.go +++ b/cmd/workspace/model-registry/model-registry.go @@ -118,8 +118,7 @@ func newApproveTransitionRequest() *cobra.Command { * Production: Production stage. - * Archived: Archived stage. - Supported values: [Archived, None, Production, Staging] + * Archived: Archived stage. ARCHIVE_EXISTING_VERSIONS: Specifies whether to archive all current model versions in the target stage.` @@ -161,10 +160,7 @@ func newApproveTransitionRequest() *cobra.Command { approveTransitionRequestReq.Version = args[1] } if !cmd.Flags().Changed("json") { - _, err = fmt.Sscan(args[2], &approveTransitionRequestReq.Stage) - if err != nil { - return fmt.Errorf("invalid STAGE: %s", args[2]) - } + approveTransitionRequestReq.Stage = args[2] } if !cmd.Flags().Changed("json") { _, err = fmt.Sscan(args[3], &approveTransitionRequestReq.ArchiveExistingVersions) @@ -307,7 +303,6 @@ func newCreateModel() *cobra.Command { cmd.Long = `Create a model. Creates a new registered model with the name specified in the request body. - Throws RESOURCE_ALREADY_EXISTS if a registered model with the given name exists. @@ -493,8 +488,7 @@ func newCreateTransitionRequest() *cobra.Command { * Production: Production stage. - * Archived: Archived stage. 
- Supported values: [Archived, None, Production, Staging]` + * Archived: Archived stage.` cmd.Annotations = make(map[string]string) @@ -534,10 +528,7 @@ func newCreateTransitionRequest() *cobra.Command { createTransitionRequestReq.Version = args[1] } if !cmd.Flags().Changed("json") { - _, err = fmt.Sscan(args[2], &createTransitionRequestReq.Stage) - if err != nil { - return fmt.Errorf("invalid STAGE: %s", args[2]) - } + createTransitionRequestReq.Stage = args[2] } response, err := w.ModelRegistry.CreateTransitionRequest(ctx, createTransitionRequestReq) @@ -586,9 +577,7 @@ func newCreateWebhook() *cobra.Command { cmd.Short = `Create a webhook.` cmd.Long = `Create a webhook. - **NOTE**: This endpoint is in Public Preview. - - Creates a registry webhook.` + **NOTE:** This endpoint is in Public Preview. Creates a registry webhook.` cmd.Annotations = make(map[string]string) @@ -954,8 +943,7 @@ func newDeleteTransitionRequest() *cobra.Command { * Production: Production stage. - * Archived: Archived stage. - Supported values: [Archived, None, Production, Staging] + * Archived: Archived stage. CREATOR: Username of the user who created this request. Of the transition requests matching the specified details, only the one transition created by this user will be deleted.` @@ -974,17 +962,14 @@ func newDeleteTransitionRequest() *cobra.Command { deleteTransitionRequestReq.Name = args[0] deleteTransitionRequestReq.Version = args[1] - _, err = fmt.Sscan(args[2], &deleteTransitionRequestReq.Stage) - if err != nil { - return fmt.Errorf("invalid STAGE: %s", args[2]) - } + deleteTransitionRequestReq.Stage = args[2] deleteTransitionRequestReq.Creator = args[3] - err = w.ModelRegistry.DeleteTransitionRequest(ctx, deleteTransitionRequestReq) + response, err := w.ModelRegistry.DeleteTransitionRequest(ctx, deleteTransitionRequestReq) if err != nil { return err } - return nil + return cmdio.Render(ctx, response) } // Disable completions since they are not applicable. @@ -1013,20 +998,19 @@ func newDeleteWebhook() *cobra.Command { var deleteWebhookReq ml.DeleteWebhookRequest - cmd.Flags().StringVar(&deleteWebhookReq.Id, "id", deleteWebhookReq.Id, `Webhook ID required to delete a registry webhook.`) - - cmd.Use = "delete-webhook" + cmd.Use = "delete-webhook ID" cmd.Short = `Delete a webhook.` cmd.Long = `Delete a webhook. - **NOTE:** This endpoint is in Public Preview. - - Deletes a registry webhook.` + **NOTE:** This endpoint is in Public Preview. Deletes a registry webhook. + + Arguments: + ID: Webhook ID required to delete a registry webhook.` cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { - check := root.ExactArgs(0) + check := root.ExactArgs(1) return check(cmd, args) } @@ -1035,6 +1019,8 @@ func newDeleteWebhook() *cobra.Command { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) + deleteWebhookReq.Id = args[0] + err = w.ModelRegistry.DeleteWebhook(ctx, deleteWebhookReq) if err != nil { return err @@ -1210,8 +1196,6 @@ func newGetModelVersion() *cobra.Command { cmd.Use = "get-model-version NAME VERSION" cmd.Short = `Get a model version.` cmd.Long = `Get a model version. - - Get a model version. 
Arguments: NAME: Name of the registered model @@ -1436,7 +1420,7 @@ func newListModels() *cobra.Command { var listModelsReq ml.ListModelsRequest - cmd.Flags().IntVar(&listModelsReq.MaxResults, "max-results", listModelsReq.MaxResults, `Maximum number of registered models desired.`) + cmd.Flags().Int64Var(&listModelsReq.MaxResults, "max-results", listModelsReq.MaxResults, `Maximum number of registered models desired.`) cmd.Flags().StringVar(&listModelsReq.PageToken, "page-token", listModelsReq.PageToken, `Pagination token to go to the next page based on a previous query.`) cmd.Use = "list-models" @@ -1495,7 +1479,7 @@ func newListTransitionRequests() *cobra.Command { Gets a list of all open stage transition requests for the model version. Arguments: - NAME: Name of the model. + NAME: Name of the registered model. VERSION: Version of the model.` cmd.Annotations = make(map[string]string) @@ -1544,16 +1528,15 @@ func newListWebhooks() *cobra.Command { var listWebhooksReq ml.ListWebhooksRequest // TODO: array: events - cmd.Flags().StringVar(&listWebhooksReq.ModelName, "model-name", listWebhooksReq.ModelName, `If not specified, all webhooks associated with the specified events are listed, regardless of their associated model.`) + cmd.Flags().Int64Var(&listWebhooksReq.MaxResults, "max-results", listWebhooksReq.MaxResults, ``) + cmd.Flags().StringVar(&listWebhooksReq.ModelName, "model-name", listWebhooksReq.ModelName, `Registered model name If not specified, all webhooks associated with the specified events are listed, regardless of their associated model.`) cmd.Flags().StringVar(&listWebhooksReq.PageToken, "page-token", listWebhooksReq.PageToken, `Token indicating the page of artifact results to fetch.`) cmd.Use = "list-webhooks" cmd.Short = `List registry webhooks.` cmd.Long = `List registry webhooks. - **NOTE:** This endpoint is in Public Preview. - - Lists all registry webhooks.` + **NOTE:** This endpoint is in Public Preview. Lists all registry webhooks.` cmd.Annotations = make(map[string]string) @@ -1619,8 +1602,7 @@ func newRejectTransitionRequest() *cobra.Command { * Production: Production stage. - * Archived: Archived stage. 
- Supported values: [Archived, None, Production, Staging]` + * Archived: Archived stage.` cmd.Annotations = make(map[string]string) @@ -1660,10 +1642,7 @@ func newRejectTransitionRequest() *cobra.Command { rejectTransitionRequestReq.Version = args[1] } if !cmd.Flags().Changed("json") { - _, err = fmt.Sscan(args[2], &rejectTransitionRequestReq.Stage) - if err != nil { - return fmt.Errorf("invalid STAGE: %s", args[2]) - } + rejectTransitionRequestReq.Stage = args[2] } response, err := w.ModelRegistry.RejectTransitionRequest(ctx, rejectTransitionRequestReq) @@ -1782,13 +1761,13 @@ func newSearchModelVersions() *cobra.Command { var searchModelVersionsReq ml.SearchModelVersionsRequest cmd.Flags().StringVar(&searchModelVersionsReq.Filter, "filter", searchModelVersionsReq.Filter, `String filter condition, like "name='my-model-name'".`) - cmd.Flags().IntVar(&searchModelVersionsReq.MaxResults, "max-results", searchModelVersionsReq.MaxResults, `Maximum number of models desired.`) + cmd.Flags().Int64Var(&searchModelVersionsReq.MaxResults, "max-results", searchModelVersionsReq.MaxResults, `Maximum number of models desired.`) // TODO: array: order_by cmd.Flags().StringVar(&searchModelVersionsReq.PageToken, "page-token", searchModelVersionsReq.PageToken, `Pagination token to go to next page based on previous search query.`) cmd.Use = "search-model-versions" - cmd.Short = `Searches model versions.` - cmd.Long = `Searches model versions. + cmd.Short = `Search model versions.` + cmd.Long = `Search model versions. Searches for specific model versions based on the supplied __filter__.` @@ -1835,7 +1814,7 @@ func newSearchModels() *cobra.Command { var searchModelsReq ml.SearchModelsRequest cmd.Flags().StringVar(&searchModelsReq.Filter, "filter", searchModelsReq.Filter, `String filter condition, like "name LIKE 'my-model-name'".`) - cmd.Flags().IntVar(&searchModelsReq.MaxResults, "max-results", searchModelsReq.MaxResults, `Maximum number of models desired.`) + cmd.Flags().Int64Var(&searchModelsReq.MaxResults, "max-results", searchModelsReq.MaxResults, `Maximum number of models desired.`) // TODO: array: order_by cmd.Flags().StringVar(&searchModelsReq.PageToken, "page-token", searchModelsReq.PageToken, `Pagination token to go to the next page based on a previous search query.`) @@ -2174,9 +2153,7 @@ func newTestRegistryWebhook() *cobra.Command { cmd.Short = `Test a webhook.` cmd.Long = `Test a webhook. - **NOTE:** This endpoint is in Public Preview. - - Tests a registry webhook. + **NOTE:** This endpoint is in Public Preview. Tests a registry webhook. Arguments: ID: Webhook ID` @@ -2260,7 +2237,7 @@ func newTransitionStage() *cobra.Command { Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] that also accepts a comment associated with the - transition to be recorded.", + transition to be recorded. [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage @@ -2275,8 +2252,7 @@ func newTransitionStage() *cobra.Command { * Production: Production stage. - * Archived: Archived stage. - Supported values: [Archived, None, Production, Staging] + * Archived: Archived stage. 
ARCHIVE_EXISTING_VERSIONS: Specifies whether to archive all current model versions in the target stage.` @@ -2318,10 +2294,7 @@ func newTransitionStage() *cobra.Command { transitionStageReq.Version = args[1] } if !cmd.Flags().Changed("json") { - _, err = fmt.Sscan(args[2], &transitionStageReq.Stage) - if err != nil { - return fmt.Errorf("invalid STAGE: %s", args[2]) - } + transitionStageReq.Stage = args[2] } if !cmd.Flags().Changed("json") { _, err = fmt.Sscan(args[3], &transitionStageReq.ArchiveExistingVersions) @@ -2496,11 +2469,11 @@ func newUpdateModel() *cobra.Command { updateModelReq.Name = args[0] } - err = w.ModelRegistry.UpdateModel(ctx, updateModelReq) + response, err := w.ModelRegistry.UpdateModel(ctx, updateModelReq) if err != nil { return err } - return nil + return cmdio.Render(ctx, response) } // Disable completions since they are not applicable. @@ -2582,11 +2555,11 @@ func newUpdateModelVersion() *cobra.Command { updateModelVersionReq.Version = args[1] } - err = w.ModelRegistry.UpdateModelVersion(ctx, updateModelVersionReq) + response, err := w.ModelRegistry.UpdateModelVersion(ctx, updateModelVersionReq) if err != nil { return err } - return nil + return cmdio.Render(ctx, response) } // Disable completions since they are not applicable. @@ -2696,15 +2669,13 @@ func newUpdateWebhook() *cobra.Command { // TODO: array: events // TODO: complex arg: http_url_spec // TODO: complex arg: job_spec - cmd.Flags().Var(&updateWebhookReq.Status, "status", `Enable or disable triggering the webhook, or put the webhook into test mode. Supported values: [ACTIVE, DISABLED, TEST_MODE]`) + cmd.Flags().Var(&updateWebhookReq.Status, "status", `Supported values: [ACTIVE, DISABLED, TEST_MODE]`) cmd.Use = "update-webhook ID" cmd.Short = `Update a webhook.` cmd.Long = `Update a webhook. - **NOTE:** This endpoint is in Public Preview. - - Updates a registry webhook. + **NOTE:** This endpoint is in Public Preview. Updates a registry webhook. Arguments: ID: Webhook ID` @@ -2744,11 +2715,11 @@ func newUpdateWebhook() *cobra.Command { updateWebhookReq.Id = args[0] } - err = w.ModelRegistry.UpdateWebhook(ctx, updateWebhookReq) + response, err := w.ModelRegistry.UpdateWebhook(ctx, updateWebhookReq) if err != nil { return err } - return nil + return cmdio.Render(ctx, response) } // Disable completions since they are not applicable. diff --git a/cmd/workspace/permissions/permissions.go b/cmd/workspace/permissions/permissions.go index 699e0bb6c6..1178565d7a 100755 --- a/cmd/workspace/permissions/permissions.go +++ b/cmd/workspace/permissions/permissions.go @@ -94,10 +94,10 @@ func newGet() *cobra.Command { Arguments: REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts, - authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, - directories, experiments, files, instance-pools, jobs, notebooks, - pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. + alertsv2, authorization, clusters, cluster-policies, dashboards, + dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, + serving-endpoints, or warehouses. REQUEST_OBJECT_ID: The id of the request object.` cmd.Annotations = make(map[string]string) @@ -156,10 +156,10 @@ func newGetPermissionLevels() *cobra.Command { Arguments: REQUEST_OBJECT_TYPE: The type of the request object. 
Can be one of the following: alerts, - authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, - directories, experiments, files, instance-pools, jobs, notebooks, - pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. + alertsv2, authorization, clusters, cluster-policies, dashboards, + dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, + serving-endpoints, or warehouses. REQUEST_OBJECT_ID: ` cmd.Annotations = make(map[string]string) @@ -225,10 +225,10 @@ func newSet() *cobra.Command { Arguments: REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts, - authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, - directories, experiments, files, instance-pools, jobs, notebooks, - pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. + alertsv2, authorization, clusters, cluster-policies, dashboards, + dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, + serving-endpoints, or warehouses. REQUEST_OBJECT_ID: The id of the request object.` cmd.Annotations = make(map[string]string) @@ -305,10 +305,10 @@ func newUpdate() *cobra.Command { Arguments: REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts, - authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, - directories, experiments, files, instance-pools, jobs, notebooks, - pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. + alertsv2, authorization, clusters, cluster-policies, dashboards, + dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, + serving-endpoints, or warehouses. REQUEST_OBJECT_ID: The id of the request object.` cmd.Annotations = make(map[string]string) diff --git a/cmd/workspace/pipelines/pipelines.go b/cmd/workspace/pipelines/pipelines.go index a9278128cc..e3e7e667c6 100755 --- a/cmd/workspace/pipelines/pipelines.go +++ b/cmd/workspace/pipelines/pipelines.go @@ -755,7 +755,7 @@ func newStartUpdate() *cobra.Command { cmd.Flags().Var(&startUpdateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - cmd.Flags().Var(&startUpdateReq.Cause, "cause", `What triggered this update. Supported values: [ + cmd.Flags().Var(&startUpdateReq.Cause, "cause", `Supported values: [ API_CALL, INFRASTRUCTURE_MAINTENANCE, JOB_TASK, diff --git a/cmd/workspace/providers/providers.go b/cmd/workspace/providers/providers.go index 08c3549588..23d18d5d4c 100755 --- a/cmd/workspace/providers/providers.go +++ b/cmd/workspace/providers/providers.go @@ -77,7 +77,7 @@ func newCreate() *cobra.Command { Arguments: NAME: The name of the Provider. - AUTHENTICATION_TYPE: The delta sharing authentication type. 
+ AUTHENTICATION_TYPE: Supported values: [DATABRICKS, OAUTH_CLIENT_CREDENTIALS, OIDC_FEDERATION, TOKEN]` cmd.Annotations = make(map[string]string) diff --git a/cmd/workspace/queries-legacy/queries-legacy.go b/cmd/workspace/queries-legacy/queries-legacy.go index 458eb7db32..e52fa6d81c 100755 --- a/cmd/workspace/queries-legacy/queries-legacy.go +++ b/cmd/workspace/queries-legacy/queries-legacy.go @@ -3,8 +3,6 @@ package queries_legacy import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdctx" "github.com/databricks/cli/libs/cmdio" @@ -70,6 +68,15 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().StringVar(&createReq.DataSourceId, "data-source-id", createReq.DataSourceId, `Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID.`) + cmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, `General description that conveys additional information about this query such as usage notes.`) + cmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `The title of this query that appears in list views, widget headings, and on the query page.`) + // TODO: any: options + cmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the object.`) + cmd.Flags().StringVar(&createReq.Query, "query", createReq.Query, `The text of the query to be run.`) + cmd.Flags().Var(&createReq.RunAsRole, "run-as-role", `Sets the **Run as** role for the object. Supported values: [owner, viewer]`) + // TODO: array: tags + cmd.Use = "create" cmd.Short = `Create a new query definition.` cmd.Long = `Create a new query definition. @@ -91,6 +98,11 @@ func newCreate() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -107,8 +119,6 @@ func newCreate() *cobra.Command { return err } } - } else { - return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") } response, err := w.QueriesLegacy.Create(ctx, createReq) @@ -159,28 +169,16 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries Legacy drop-down." - names, err := w.QueriesLegacy.LegacyQueryNameToIdMap(ctx, sql.ListQueriesLegacyRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Queries Legacy drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } deleteReq.QueryId = args[0] err = w.QueriesLegacy.Delete(ctx, deleteReq) @@ -230,28 +228,16 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries Legacy drop-down." - names, err := w.QueriesLegacy.LegacyQueryNameToIdMap(ctx, sql.ListQueriesLegacyRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Queries Legacy drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } getReq.QueryId = args[0] response, err := w.QueriesLegacy.Get(ctx, getReq) @@ -363,28 +349,16 @@ func newRestore() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries Legacy drop-down." - names, err := w.QueriesLegacy.LegacyQueryNameToIdMap(ctx, sql.ListQueriesLegacyRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Queries Legacy drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } restoreReq.QueryId = args[0] err = w.QueriesLegacy.Restore(ctx, restoreReq) @@ -446,6 +420,11 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -463,23 +442,6 @@ func newUpdate() *cobra.Command { } } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries Legacy drop-down." - names, err := w.QueriesLegacy.LegacyQueryNameToIdMap(ctx, sql.ListQueriesLegacyRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Queries Legacy drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } updateReq.QueryId = args[0] response, err := w.QueriesLegacy.Update(ctx, updateReq) diff --git a/cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go b/cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go index f6281872ba..a7852161ff 100755 --- a/cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go +++ b/cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go @@ -69,6 +69,9 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, `A short description of this visualization.`) + cmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `The name of the visualization that appears on dashboards and the query screen.`) + cmd.Use = "create" cmd.Short = `Add visualization to a query.` cmd.Long = `Add visualization to a query. @@ -147,7 +150,7 @@ func newDelete() *cobra.Command { [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html Arguments: - ID: Widget ID returned by :method:queryvizualisations/create` + ID: Widget ID returned by :method:queryvisualizations/create` cmd.Annotations = make(map[string]string) @@ -199,6 +202,15 @@ func newUpdate() *cobra.Command { cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().StringVar(&updateReq.CreatedAt, "created-at", updateReq.CreatedAt, ``) + cmd.Flags().StringVar(&updateReq.Description, "description", updateReq.Description, `A short description of this visualization.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `The UUID for this visualization.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name of the visualization that appears on dashboards and the query screen.`) + // TODO: any: options + // TODO: complex arg: query + cmd.Flags().StringVar(&updateReq.Type, "type", updateReq.Type, `The type of visualization: chart, table, pivot table, and so on.`) + cmd.Flags().StringVar(&updateReq.UpdatedAt, "updated-at", updateReq.UpdatedAt, ``) + cmd.Use = "update ID" cmd.Short = `Edit existing visualization.` cmd.Long = `Edit existing visualization. @@ -236,8 +248,6 @@ func newUpdate() *cobra.Command { return err } } - } else { - return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") } updateReq.Id = args[0] diff --git a/cmd/workspace/recipients/recipients.go b/cmd/workspace/recipients/recipients.go index df2fc1d932..5d7004fd71 100755 --- a/cmd/workspace/recipients/recipients.go +++ b/cmd/workspace/recipients/recipients.go @@ -97,7 +97,7 @@ func newCreate() *cobra.Command { Arguments: NAME: Name of Recipient. - AUTHENTICATION_TYPE: The delta sharing authentication type. + AUTHENTICATION_TYPE: Supported values: [DATABRICKS, OAUTH_CLIENT_CREDENTIALS, OIDC_FEDERATION, TOKEN]` cmd.Annotations = make(map[string]string) diff --git a/cmd/workspace/secrets/secrets.go b/cmd/workspace/secrets/secrets.go index fff209e13c..a05d6e2fca 100755 --- a/cmd/workspace/secrets/secrets.go +++ b/cmd/workspace/secrets/secrets.go @@ -85,8 +85,43 @@ func newCreateScope() *cobra.Command { cmd.Short = `Create a new secret scope.` cmd.Long = `Create a new secret scope. 
+ Creates a new secret scope. + The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. + + Example request: + + .. code:: + + { "scope": "my-simple-databricks-scope", "initial_manage_principal": "users" + "scope_backend_type": "databricks|azure_keyvault", # below is only required if + scope type is azure_keyvault "backend_azure_keyvault": { "resource_id": + "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/xxxx/providers/Microsoft.KeyVault/vaults/xxxx", + "tenant_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", "dns_name": + "https://xxxx.vault.azure.net/", } } + + If initial_manage_principal is specified, the initial ACL applied to the + scope is applied to the supplied principal (user or group) with MANAGE + permissions. The only supported principal for this option is the group + users, which contains all users in the workspace. If + initial_manage_principal is not specified, the initial ACL with MANAGE + permission applied to the scope is assigned to the API request issuer's user + identity. + + If scope_backend_type is azure_keyvault, a secret scope is created + with secrets from a given Azure KeyVault. The caller must provide the + keyvault_resource_id and the tenant_id for the key vault. If + scope_backend_type is databricks or is unspecified, an empty secret + scope is created and stored in Databricks's own storage. + + Throws RESOURCE_ALREADY_EXISTS if a scope with the given name already + exists. Throws RESOURCE_LIMIT_EXCEEDED if maximum number of scopes in the + workspace is exceeded. Throws INVALID_PARAMETER_VALUE if the scope name is + invalid. Throws BAD_REQUEST if request violated constraints. Throws + CUSTOMER_UNAUTHORIZED if normal user attempts to create a scope with name + reserved for databricks internal usage. Throws UNAUTHENTICATED if unable + to verify user access permission on Azure KeyVault Arguments: SCOPE: Scope name requested by the user. Scope names are unique.` @@ -168,10 +203,18 @@ func newDeleteAcl() *cobra.Command { Deletes the given ACL on the given scope. - Users must have the MANAGE permission to invoke this API. Throws - RESOURCE_DOES_NOT_EXIST if no such secret scope, principal, or ACL exists. - Throws PERMISSION_DENIED if the user does not have permission to make this - API call. + Users must have the MANAGE permission to invoke this API. + + Example request: + + .. code:: + + { "scope": "my-secret-scope", "principal": "data-scientists" } + + Throws RESOURCE_DOES_NOT_EXIST if no such secret scope, principal, or ACL + exists. Throws PERMISSION_DENIED if the user does not have permission to + make this API call. Throws INVALID_PARAMETER_VALUE if the permission or + principal is invalid. Arguments: SCOPE: The name of the scope to remove permissions from. @@ -257,9 +300,16 @@ func newDeleteScope() *cobra.Command { Deletes a secret scope. + Example request: + + .. code:: + + { "scope": "my-secret-scope" } + Throws RESOURCE_DOES_NOT_EXIST if the scope does not exist. Throws PERMISSION_DENIED if the user does not have permission to make this API - call. + call. Throws BAD_REQUEST if system user attempts to delete internal secret + scope. Arguments: SCOPE: Name of the scope to delete.` @@ -340,11 +390,18 @@ func newDeleteSecret() *cobra.Command { cmd.Long = `Delete a secret. Deletes the secret stored in this secret scope. You must have WRITE or - MANAGE permission on the secret scope. + MANAGE permission on the Secret Scope. + + Example request: + + .. 
code:: + + { "scope": "my-secret-scope", "key": "my-secret-key" } Throws RESOURCE_DOES_NOT_EXIST if no such secret scope or secret exists. Throws PERMISSION_DENIED if the user does not have permission to make this - API call. + API call. Throws BAD_REQUEST if system user attempts to delete an internal + secret, or request is made against Azure KeyVault backed scope. Arguments: SCOPE: The name of the scope that contains the secret to delete. @@ -425,12 +482,20 @@ func newGetAcl() *cobra.Command { cmd.Short = `Get secret ACL details.` cmd.Long = `Get secret ACL details. - Gets the details about the given ACL, such as the group and permission. Users - must have the MANAGE permission to invoke this API. + Describes the details about the given ACL, such as the group and permission. + + Users must have the MANAGE permission to invoke this API. + + Example response: + + .. code:: + + { "principal": "data-scientists", "permission": "READ" } Throws RESOURCE_DOES_NOT_EXIST if no such secret scope exists. Throws PERMISSION_DENIED if the user does not have permission to make this API - call. + call. Throws INVALID_PARAMETER_VALUE if the permission or principal is + invalid. Arguments: SCOPE: The name of the scope to fetch ACL information from. @@ -488,22 +553,37 @@ func newGetSecret() *cobra.Command { cmd.Short = `Get a secret.` cmd.Long = `Get a secret. - Gets the bytes representation of a secret value for the specified scope and - key. + Gets a secret for a given key and scope. This API can only be called from the + DBUtils interface. Users need the READ permission to make this call. + + Example response: + + .. code:: - Users need the READ permission to make this call. + { "key": "my-string-key", "value": } Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the caller in DBUtils and the type the data is decoded into. + Throws RESOURCE_DOES_NOT_EXIST if no such secret or secret scope exists. Throws PERMISSION_DENIED if the user does not have permission to make this - API call. Throws RESOURCE_DOES_NOT_EXIST if no such secret or secret scope - exists. + API call. + + Note: This is explicitly an undocumented API. It also doesn't need to be + supported for the /preview prefix, because it's not a customer-facing API + (i.e. only used for DBUtils SecretUtils to fetch secrets). + + Throws RESOURCE_DOES_NOT_EXIST if no such secret scope or secret exists. + Throws BAD_REQUEST if normal user calls get secret outside of a notebook. + AKV specific errors: Throws INVALID_PARAMETER_VALUE if secret name is not + alphanumeric or too long. Throws PERMISSION_DENIED if secret manager + cannot access AKV with 403 error Throws MALFORMED_REQUEST if secret + manager cannot access AKV with any other 4xx error Arguments: - SCOPE: The name of the scope to fetch secret information from. - KEY: The key to fetch secret for.` + SCOPE: The name of the scope that contains the secret. + KEY: Name of the secret to fetch value information.` cmd.Annotations = make(map[string]string) @@ -557,8 +637,16 @@ func newListAcls() *cobra.Command { cmd.Short = `Lists ACLs.` cmd.Long = `Lists ACLs. - List the ACLs for a given secret scope. Users must have the MANAGE - permission to invoke this API. + Lists the ACLs set on the given scope. + + Users must have the MANAGE permission to invoke this API. + + Example response: + + .. 
code:: + + { "acls": [{ "principal": "admins", "permission": "MANAGE" },{ "principal": + "data-scientists", "permission": "READ" }] } Throws RESOURCE_DOES_NOT_EXIST if no such secret scope exists. Throws PERMISSION_DENIED if the user does not have permission to make this API @@ -614,6 +702,13 @@ func newListScopes() *cobra.Command { Lists all secret scopes available in the workspace. + Example response: + + .. code:: + + { "scopes": [{ "name": "my-databricks-scope", "backend_type": "DATABRICKS" },{ + "name": "mount-points", "backend_type": "DATABRICKS" }] } + Throws PERMISSION_DENIED if the user does not have permission to make this API call.` @@ -661,8 +756,17 @@ func newListSecrets() *cobra.Command { operation; secret data cannot be retrieved using this API. Users need the READ permission to make this call. - The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws - RESOURCE_DOES_NOT_EXIST if no such secret scope exists. Throws + Example response: + + .. code:: + + { "secrets": [ { "key": "my-string-key"", "last_updated_timestamp": + "1520467595000" }, { "key": "my-byte-key", "last_updated_timestamp": + "1520467595000" }, ] } + + The lastUpdatedTimestamp returned is in milliseconds since epoch. + + Throws RESOURCE_DOES_NOT_EXIST if no such secret scope exists. Throws PERMISSION_DENIED if the user does not have permission to make this API call. @@ -720,15 +824,15 @@ func newPutAcl() *cobra.Command { cmd.Short = `Create/update an ACL.` cmd.Long = `Create/update an ACL. - Creates or overwrites the Access Control List (ACL) associated with the given - principal (user or group) on the specified scope point. - - In general, a user or group will use the most powerful permission available to - them, and permissions are ordered as follows: + Creates or overwrites the ACL associated with the given principal (user or + group) on the specified scope point. In general, a user or group will use the + most powerful permission available to them, and permissions are ordered as + follows: - * MANAGE - Allowed to change ACLs, and read and write to this secret scope. - * WRITE - Allowed to read and write to this secret scope. * READ - Allowed - to read this secret scope and list what secrets are available. + * MANAGE - Allowed to change ACLs, and read and write to this secret + scope. * WRITE - Allowed to read and write to this secret scope. * + READ - Allowed to read this secret scope and list what secrets are + available. Note that in general, secret values can only be read from within a command on a cluster (for example, through a notebook). There is no API to read the @@ -738,6 +842,13 @@ func newPutAcl() *cobra.Command { Users must have the MANAGE permission to invoke this API. + Example request: + + .. code:: + + { "scope": "my-secret-scope", "principal": "data-scientists", "permission": + "READ" } + The principal is a user or group name corresponding to an existing Databricks principal to be granted or revoked access. diff --git a/cmd/workspace/service-principal-secrets-proxy/service-principal-secrets-proxy.go b/cmd/workspace/service-principal-secrets-proxy/service-principal-secrets-proxy.go new file mode 100755 index 0000000000..edbda27e40 --- /dev/null +++ b/cmd/workspace/service-principal-secrets-proxy/service-principal-secrets-proxy.go @@ -0,0 +1,246 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package service_principal_secrets_proxy + +import ( + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/oauth2" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "service-principal-secrets-proxy", + Short: `These APIs enable administrators to manage service principal secrets at the workspace level.`, + Long: `These APIs enable administrators to manage service principal secrets at the + workspace level. To use these APIs, the service principal must be first added + to the current workspace. + + You can use the generated secrets to obtain OAuth access tokens for a service + principal, which can then be used to access Databricks Accounts and Workspace + APIs. For more information, see [Authentication using OAuth tokens for service + principals]. + + In addition, the generated secrets can be used to configure the Databricks + Terraform Providerto authenticate with the service principal. For more + information, see [Databricks Terraform Provider]. + + [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html + [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal`, + GroupID: "oauth2", + Annotations: map[string]string{ + "package": "oauth2", + }, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newCreate()) + cmd.AddCommand(newDelete()) + cmd.AddCommand(newList()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *oauth2.CreateServicePrincipalSecretRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq oauth2.CreateServicePrincipalSecretRequest + var createJson flags.JsonFlag + + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createReq.Lifetime, "lifetime", createReq.Lifetime, `The lifetime of the secret in seconds.`) + + cmd.Use = "create SERVICE_PRINCIPAL_ID" + cmd.Short = `Create service principal secret.` + cmd.Long = `Create service principal secret. + + Create a secret for the given service principal. 
+ + Arguments: + SERVICE_PRINCIPAL_ID: The service principal ID.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createJson.Unmarshal(&createReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + createReq.ServicePrincipalId = args[0] + + response, err := w.ServicePrincipalSecretsProxy.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *oauth2.DeleteServicePrincipalSecretRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq oauth2.DeleteServicePrincipalSecretRequest + + cmd.Use = "delete SERVICE_PRINCIPAL_ID SECRET_ID" + cmd.Short = `Delete service principal secret.` + cmd.Long = `Delete service principal secret. + + Delete a secret from the given service principal. + + Arguments: + SERVICE_PRINCIPAL_ID: The service principal ID. + SECRET_ID: The secret ID.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + deleteReq.ServicePrincipalId = args[0] + deleteReq.SecretId = args[1] + + err = w.ServicePrincipalSecretsProxy.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *oauth2.ListServicePrincipalSecretsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq oauth2.ListServicePrincipalSecretsRequest + + cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, ``) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `An opaque page token which was the next_page_token in the response of the previous request to list the secrets for this service principal.`) + + cmd.Use = "list SERVICE_PRINCIPAL_ID" + cmd.Short = `List service principal secrets.` + cmd.Long = `List service principal secrets. 
+ + List all secrets associated with the given service principal. This operation + only returns information about the secrets themselves and does not include the + secret values. + + Arguments: + SERVICE_PRINCIPAL_ID: The service principal ID.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + listReq.ServicePrincipalId = args[0] + + response := w.ServicePrincipalSecretsProxy.List(ctx, listReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +// end service ServicePrincipalSecretsProxy diff --git a/cmd/workspace/serving-endpoints/serving-endpoints.go b/cmd/workspace/serving-endpoints/serving-endpoints.go index 3542d92c53..997ec86bb8 100755 --- a/cmd/workspace/serving-endpoints/serving-endpoints.go +++ b/cmd/workspace/serving-endpoints/serving-endpoints.go @@ -158,6 +158,7 @@ func newCreate() *cobra.Command { // TODO: complex arg: ai_gateway cmd.Flags().StringVar(&createReq.BudgetPolicyId, "budget-policy-id", createReq.BudgetPolicyId, `The budget policy to be applied to the serving endpoint.`) // TODO: complex arg: config + cmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, ``) // TODO: array: rate_limits cmd.Flags().BoolVar(&createReq.RouteOptimized, "route-optimized", createReq.RouteOptimized, `Enable route optimization for the serving endpoint.`) // TODO: array: tags diff --git a/cmd/workspace/settings/settings.go b/cmd/workspace/settings/settings.go index 2754412a77..debd7af8e2 100755 --- a/cmd/workspace/settings/settings.go +++ b/cmd/workspace/settings/settings.go @@ -12,6 +12,7 @@ import ( compliance_security_profile "github.com/databricks/cli/cmd/workspace/compliance-security-profile" dashboard_email_subscriptions "github.com/databricks/cli/cmd/workspace/dashboard-email-subscriptions" default_namespace "github.com/databricks/cli/cmd/workspace/default-namespace" + default_warehouse_id "github.com/databricks/cli/cmd/workspace/default-warehouse-id" disable_legacy_access "github.com/databricks/cli/cmd/workspace/disable-legacy-access" disable_legacy_dbfs "github.com/databricks/cli/cmd/workspace/disable-legacy-dbfs" enable_export_notebook "github.com/databricks/cli/cmd/workspace/enable-export-notebook" @@ -46,6 +47,7 @@ func New() *cobra.Command { cmd.AddCommand(compliance_security_profile.New()) cmd.AddCommand(dashboard_email_subscriptions.New()) cmd.AddCommand(default_namespace.New()) + cmd.AddCommand(default_warehouse_id.New()) cmd.AddCommand(disable_legacy_access.New()) cmd.AddCommand(disable_legacy_dbfs.New()) cmd.AddCommand(enable_export_notebook.New()) diff --git a/cmd/workspace/tables/tables.go b/cmd/workspace/tables/tables.go index c1068fc691..c005432256 100755 --- a/cmd/workspace/tables/tables.go +++ b/cmd/workspace/tables/tables.go @@ -3,8 +3,6 @@ package tables import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdctx" "github.com/databricks/cli/libs/cmdio" @@ -82,28 +80,16 @@ func newDelete() *cobra.Command { cmd.Annotations 
= make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." - names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Full name of the table") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have full name of the table") - } deleteReq.FullName = args[0] err = w.Tables.Delete(ctx, deleteReq) @@ -157,28 +143,16 @@ func newExists() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." - names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Full name of the table") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have full name of the table") - } existsReq.FullName = args[0] response, err := w.Tables.Exists(ctx, existsReq) @@ -216,7 +190,7 @@ func newGet() *cobra.Command { cmd.Flags().BoolVar(&getReq.IncludeBrowse, "include-browse", getReq.IncludeBrowse, `Whether to include tables in the response for which the principal can only access selective metadata for.`) cmd.Flags().BoolVar(&getReq.IncludeDeltaMetadata, "include-delta-metadata", getReq.IncludeDeltaMetadata, `Whether delta metadata should be included in the response.`) - cmd.Flags().BoolVar(&getReq.IncludeManifestCapabilities, "include-manifest-capabilities", getReq.IncludeManifestCapabilities, `Whether to include a manifest containing capabilities the table has.`) + cmd.Flags().BoolVar(&getReq.IncludeManifestCapabilities, "include-manifest-capabilities", getReq.IncludeManifestCapabilities, `Whether to include a manifest containing table capabilities in the response.`) cmd.Use = "get FULL_NAME" cmd.Short = `Get a table.` @@ -235,28 +209,16 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." 
- names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Full name of the table") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have full name of the table") - } getReq.FullName = args[0] response, err := w.Tables.Get(ctx, getReq) @@ -293,8 +255,7 @@ func newList() *cobra.Command { var listReq catalog.ListTablesRequest cmd.Flags().BoolVar(&listReq.IncludeBrowse, "include-browse", listReq.IncludeBrowse, `Whether to include tables in the response for which the principal can only access selective metadata for.`) - cmd.Flags().BoolVar(&listReq.IncludeDeltaMetadata, "include-delta-metadata", listReq.IncludeDeltaMetadata, `Whether delta metadata should be included in the response.`) - cmd.Flags().BoolVar(&listReq.IncludeManifestCapabilities, "include-manifest-capabilities", listReq.IncludeManifestCapabilities, `Whether to include a manifest containing capabilities the table has.`) + cmd.Flags().BoolVar(&listReq.IncludeManifestCapabilities, "include-manifest-capabilities", listReq.IncludeManifestCapabilities, `Whether to include a manifest containing table capabilities in the response.`) cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of tables to return.`) cmd.Flags().BoolVar(&listReq.OmitColumns, "omit-columns", listReq.OmitColumns, `Whether to omit the columns of the table from the response or not.`) cmd.Flags().BoolVar(&listReq.OmitProperties, "omit-properties", listReq.OmitProperties, `Whether to omit the properties of the table from the response or not.`) @@ -361,7 +322,7 @@ func newListSummaries() *cobra.Command { var listSummariesReq catalog.ListSummariesRequest - cmd.Flags().BoolVar(&listSummariesReq.IncludeManifestCapabilities, "include-manifest-capabilities", listSummariesReq.IncludeManifestCapabilities, `Whether to include a manifest containing capabilities the table has.`) + cmd.Flags().BoolVar(&listSummariesReq.IncludeManifestCapabilities, "include-manifest-capabilities", listSummariesReq.IncludeManifestCapabilities, `Whether to include a manifest containing table capabilities in the response.`) cmd.Flags().IntVar(&listSummariesReq.MaxResults, "max-results", listSummariesReq.MaxResults, `Maximum number of summaries for tables to return.`) cmd.Flags().StringVar(&listSummariesReq.PageToken, "page-token", listSummariesReq.PageToken, `Opaque pagination token to go to next page based on previous query.`) cmd.Flags().StringVar(&listSummariesReq.SchemaNamePattern, "schema-name-pattern", listSummariesReq.SchemaNamePattern, `A sql LIKE pattern (% and _) for schema names.`) @@ -388,28 +349,16 @@ func newListSummaries() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CATALOG_NAME argument specified. Loading names for Tables drop-down." 
- names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of parent catalog for tables of interest") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of parent catalog for tables of interest") - } listSummariesReq.CatalogName = args[0] response := w.Tables.ListSummaries(ctx, listSummariesReq) @@ -445,7 +394,7 @@ func newUpdate() *cobra.Command { cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, ``) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of table.`) cmd.Use = "update FULL_NAME" cmd.Short = `Update a table owner.` @@ -465,6 +414,11 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -482,23 +436,6 @@ func newUpdate() *cobra.Command { } } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." - names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Full name of the table") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have full name of the table") - } updateReq.FullName = args[0] err = w.Tables.Update(ctx, updateReq) diff --git a/cmd/workspace/vector-search-indexes/vector-search-indexes.go b/cmd/workspace/vector-search-indexes/vector-search-indexes.go index 9e88ed1ebd..8a97b75227 100755 --- a/cmd/workspace/vector-search-indexes/vector-search-indexes.go +++ b/cmd/workspace/vector-search-indexes/vector-search-indexes.go @@ -87,12 +87,7 @@ func newCreateIndex() *cobra.Command { NAME: Name of the index ENDPOINT_NAME: Name of the endpoint to be used for serving the index PRIMARY_KEY: Primary key of the index - INDEX_TYPE: There are 2 types of Vector Search indexes: - DELTA_SYNC: An index that - automatically syncs with a source Delta Table, automatically and - incrementally updating the index as the underlying data in the Delta Table - changes. - DIRECT_ACCESS: An index that supports direct read and write - of vectors and metadata through our REST and SDK APIs. With this model, - the user manages index updates. 
+ INDEX_TYPE: Supported values: [DELTA_SYNC, DIRECT_ACCESS]` cmd.Annotations = make(map[string]string) diff --git a/cmd/workspace/volumes/volumes.go b/cmd/workspace/volumes/volumes.go index 1310f4da11..ce76a94fc5 100755 --- a/cmd/workspace/volumes/volumes.go +++ b/cmd/workspace/volumes/volumes.go @@ -98,12 +98,7 @@ func newCreate() *cobra.Command { CATALOG_NAME: The name of the catalog where the schema and the volume are SCHEMA_NAME: The name of the schema where the volume is NAME: The name of the volume - VOLUME_TYPE: The type of the volume. An external volume is located in the specified - external location. A managed volume is located in the default location - which is specified by the parent schema, or the parent catalog, or the - Metastore. [Learn more] - - [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external + VOLUME_TYPE: Supported values: [EXTERNAL, MANAGED]` cmd.Annotations = make(map[string]string) diff --git a/cmd/workspace/warehouses/warehouses.go b/cmd/workspace/warehouses/warehouses.go index 5a1a532512..8928a265a8 100755 --- a/cmd/workspace/warehouses/warehouses.go +++ b/cmd/workspace/warehouses/warehouses.go @@ -88,9 +88,9 @@ func newCreate() *cobra.Command { cmd.Flags().IntVar(&createReq.MaxNumClusters, "max-num-clusters", createReq.MaxNumClusters, `Maximum number of clusters that the autoscaler will create to handle concurrent queries.`) cmd.Flags().IntVar(&createReq.MinNumClusters, "min-num-clusters", createReq.MinNumClusters, `Minimum number of available clusters that will be maintained for this SQL warehouse.`) cmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `Logical name for the cluster.`) - cmd.Flags().Var(&createReq.SpotInstancePolicy, "spot-instance-policy", `Configurations whether the warehouse should use spot instances. Supported values: [COST_OPTIMIZED, POLICY_UNSPECIFIED, RELIABILITY_OPTIMIZED]`) + cmd.Flags().Var(&createReq.SpotInstancePolicy, "spot-instance-policy", `Supported values: [COST_OPTIMIZED, POLICY_UNSPECIFIED, RELIABILITY_OPTIMIZED]`) // TODO: complex arg: tags - cmd.Flags().Var(&createReq.WarehouseType, "warehouse-type", `Warehouse type: PRO or CLASSIC. Supported values: [CLASSIC, PRO, TYPE_UNSPECIFIED]`) + cmd.Flags().Var(&createReq.WarehouseType, "warehouse-type", `Supported values: [CLASSIC, PRO, TYPE_UNSPECIFIED]`) cmd.Use = "create" cmd.Short = `Create a warehouse.` @@ -262,9 +262,9 @@ func newEdit() *cobra.Command { cmd.Flags().IntVar(&editReq.MaxNumClusters, "max-num-clusters", editReq.MaxNumClusters, `Maximum number of clusters that the autoscaler will create to handle concurrent queries.`) cmd.Flags().IntVar(&editReq.MinNumClusters, "min-num-clusters", editReq.MinNumClusters, `Minimum number of available clusters that will be maintained for this SQL warehouse.`) cmd.Flags().StringVar(&editReq.Name, "name", editReq.Name, `Logical name for the cluster.`) - cmd.Flags().Var(&editReq.SpotInstancePolicy, "spot-instance-policy", `Configurations whether the warehouse should use spot instances. Supported values: [COST_OPTIMIZED, POLICY_UNSPECIFIED, RELIABILITY_OPTIMIZED]`) + cmd.Flags().Var(&editReq.SpotInstancePolicy, "spot-instance-policy", `Supported values: [COST_OPTIMIZED, POLICY_UNSPECIFIED, RELIABILITY_OPTIMIZED]`) // TODO: complex arg: tags - cmd.Flags().Var(&editReq.WarehouseType, "warehouse-type", `Warehouse type: PRO or CLASSIC. 
Supported values: [CLASSIC, PRO, TYPE_UNSPECIFIED]`) + cmd.Flags().Var(&editReq.WarehouseType, "warehouse-type", `Supported values: [CLASSIC, PRO, TYPE_UNSPECIFIED]`) cmd.Use = "edit ID" cmd.Short = `Update a warehouse.` diff --git a/experimental/python/databricks/bundles/pipelines/_models/ingestion_source_type.py b/experimental/python/databricks/bundles/pipelines/_models/ingestion_source_type.py index b5ed997cfd..d33ba2493e 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/ingestion_source_type.py +++ b/experimental/python/databricks/bundles/pipelines/_models/ingestion_source_type.py @@ -7,6 +7,7 @@ class IngestionSourceType(Enum): POSTGRESQL = "POSTGRESQL" SQLSERVER = "SQLSERVER" SALESFORCE = "SALESFORCE" + BIGQUERY = "BIGQUERY" NETSUITE = "NETSUITE" WORKDAY_RAAS = "WORKDAY_RAAS" GA4_RAW_DATA = "GA4_RAW_DATA" @@ -16,6 +17,7 @@ class IngestionSourceType(Enum): TERADATA = "TERADATA" SHAREPOINT = "SHAREPOINT" DYNAMICS365 = "DYNAMICS365" + CONFLUENCE = "CONFLUENCE" IngestionSourceTypeParam = ( @@ -24,6 +26,7 @@ class IngestionSourceType(Enum): "POSTGRESQL", "SQLSERVER", "SALESFORCE", + "BIGQUERY", "NETSUITE", "WORKDAY_RAAS", "GA4_RAW_DATA", @@ -33,6 +36,7 @@ class IngestionSourceType(Enum): "TERADATA", "SHAREPOINT", "DYNAMICS365", + "CONFLUENCE", ] | IngestionSourceType ) diff --git a/experimental/python/databricks/bundles/pipelines/_models/path_pattern.py b/experimental/python/databricks/bundles/pipelines/_models/path_pattern.py index 557767c213..e8e04fd949 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/path_pattern.py +++ b/experimental/python/databricks/bundles/pipelines/_models/path_pattern.py @@ -11,9 +11,7 @@ @dataclass(kw_only=True) class PathPattern: - """ - :meta private: [EXPERIMENTAL] - """ + """""" include: VariableOrOptional[str] = None """ diff --git a/experimental/python/databricks/bundles/pipelines/_models/pipeline.py b/experimental/python/databricks/bundles/pipelines/_models/pipeline.py index f4435845f4..a700046063 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/pipeline.py +++ b/experimental/python/databricks/bundles/pipelines/_models/pipeline.py @@ -103,8 +103,6 @@ class Pipeline(Resource): environment: VariableOrOptional[PipelinesEnvironment] = None """ - :meta private: [EXPERIMENTAL] - Environment specification for this pipeline used to install dependencies. """ @@ -166,8 +164,6 @@ class Pipeline(Resource): root_path: VariableOrOptional[str] = None """ - :meta private: [EXPERIMENTAL] - Root path for this pipeline. This is used as the root directory when editing the pipeline in the Databricks user interface and it is added to sys.path when executing Python sources during pipeline execution. @@ -260,8 +256,6 @@ class PipelineDict(TypedDict, total=False): environment: VariableOrOptional[PipelinesEnvironmentParam] """ - :meta private: [EXPERIMENTAL] - Environment specification for this pipeline used to install dependencies. """ @@ -323,8 +317,6 @@ class PipelineDict(TypedDict, total=False): root_path: VariableOrOptional[str] """ - :meta private: [EXPERIMENTAL] - Root path for this pipeline. This is used as the root directory when editing the pipeline in the Databricks user interface and it is added to sys.path when executing Python sources during pipeline execution. 
diff --git a/experimental/python/databricks/bundles/pipelines/_models/pipeline_library.py b/experimental/python/databricks/bundles/pipelines/_models/pipeline_library.py index 68bf2e5c47..459cf3bd8b 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/pipeline_library.py +++ b/experimental/python/databricks/bundles/pipelines/_models/pipeline_library.py @@ -36,8 +36,6 @@ class PipelineLibrary: glob: VariableOrOptional[PathPattern] = None """ - :meta private: [EXPERIMENTAL] - The unified field to include source codes. Each entry can be a notebook path, a file path, or a folder path that ends `/**`. This field cannot be used together with `notebook` or `file`. @@ -80,8 +78,6 @@ class PipelineLibraryDict(TypedDict, total=False): glob: VariableOrOptional[PathPatternParam] """ - :meta private: [EXPERIMENTAL] - The unified field to include source codes. Each entry can be a notebook path, a file path, or a folder path that ends `/**`. This field cannot be used together with `notebook` or `file`. diff --git a/experimental/python/databricks/bundles/pipelines/_models/pipelines_environment.py b/experimental/python/databricks/bundles/pipelines/_models/pipelines_environment.py index bf92d5306c..dd4d3ef81a 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/pipelines_environment.py +++ b/experimental/python/databricks/bundles/pipelines/_models/pipelines_environment.py @@ -12,8 +12,6 @@ @dataclass(kw_only=True) class PipelinesEnvironment: """ - :meta private: [EXPERIMENTAL] - The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal environment spec, only pip dependencies are supported. """ diff --git a/experimental/python/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py b/experimental/python/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py index 1ad679e4b0..6d138b7808 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py +++ b/experimental/python/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py @@ -11,8 +11,9 @@ class TableSpecificConfigScdType(Enum): SCD_TYPE_1 = "SCD_TYPE_1" SCD_TYPE_2 = "SCD_TYPE_2" + APPEND_ONLY = "APPEND_ONLY" TableSpecificConfigScdTypeParam = ( - Literal["SCD_TYPE_1", "SCD_TYPE_2"] | TableSpecificConfigScdType + Literal["SCD_TYPE_1", "SCD_TYPE_2", "APPEND_ONLY"] | TableSpecificConfigScdType ) diff --git a/go.mod b/go.mod index 13d77cfcd5..46595041de 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/BurntSushi/toml v1.5.0 // MIT github.com/Masterminds/semver/v3 v3.4.0 // MIT github.com/briandowns/spinner v1.23.1 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.73.1 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.75.0 // Apache 2.0 github.com/fatih/color v1.18.0 // MIT github.com/google/uuid v1.6.0 // BSD-3-Clause github.com/gorilla/mux v1.8.1 // BSD 3-Clause diff --git a/go.sum b/go.sum index 4f2698c711..122be8ee47 100644 --- a/go.sum +++ b/go.sum @@ -33,8 +33,8 @@ github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZ github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= 
-github.com/databricks/databricks-sdk-go v0.73.1 h1:c+o2J+l37WnQWqY4muokpFL1E2a5oJVC5J+Z8oFNCeU= -github.com/databricks/databricks-sdk-go v0.73.1/go.mod h1:xBtjeP9nq+6MgTewZW1EcbRkD7aDY9gZvcRPcwPhZjw= +github.com/databricks/databricks-sdk-go v0.75.0 h1:BIRSPmUNtkSqAywFPOIsy2Oq+C9xc+X6TAGGYpKXuBo= +github.com/databricks/databricks-sdk-go v0.75.0/go.mod h1:xBtjeP9nq+6MgTewZW1EcbRkD7aDY9gZvcRPcwPhZjw= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= diff --git a/libs/testserver/pipelines.go b/libs/testserver/pipelines.go index ee132496fc..daefd3ee16 100644 --- a/libs/testserver/pipelines.go +++ b/libs/testserver/pipelines.go @@ -90,9 +90,7 @@ func (s *FakeWorkspace) PipelineUpdate(req Request, pipelineId string) Response setSpecDefaults(&spec, pipelineId) s.Pipelines[pipelineId] = item - return Response{ - Body: pipelines.EditPipelineResponse{}, - } + return Response{} } func (s *FakeWorkspace) PipelineStartUpdate(pipelineId string) Response {
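For context on the new `service-principal-secrets-proxy` command group added above, here is a minimal sketch of how the same workspace-level API can be called directly through the Go SDK that this change upgrades to (0.75.0). It is not part of the patch: it only assumes that `databricks.WorkspaceClient` exposes `ServicePrincipalSecretsProxy.Create`, `.Delete`, and `.List` exactly as the generated CLI code invokes them, and the service principal ID used is a placeholder.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/listing"
	"github.com/databricks/databricks-sdk-go/service/oauth2"
)

func main() {
	ctx := context.Background()

	// Resolves credentials from the environment or ~/.databrickscfg,
	// the same way the CLI's MustWorkspaceClient does.
	w, err := databricks.NewWorkspaceClient()
	if err != nil {
		log.Fatal(err)
	}

	spID := "1234567890" // placeholder: ID of a service principal already added to the workspace

	// Create a secret for the service principal at the workspace level.
	created, err := w.ServicePrincipalSecretsProxy.Create(ctx, oauth2.CreateServicePrincipalSecretRequest{
		ServicePrincipalId: spID,
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("created: %+v\n", created)

	// List the secrets for the service principal. List returns a paginated
	// iterator (the CLI renders it with cmdio.RenderIterator); here it is
	// drained into a slice with listing.ToSlice.
	secrets, err := listing.ToSlice(ctx, w.ServicePrincipalSecretsProxy.List(ctx, oauth2.ListServicePrincipalSecretsRequest{
		ServicePrincipalId: spID,
	}))
	if err != nil {
		log.Fatal(err)
	}
	for _, s := range secrets {
		fmt.Printf("secret: %+v\n", s)
	}
}

Deleting a secret follows the same pattern with oauth2.DeleteServicePrincipalSecretRequest, which takes both the service principal ID and the secret ID, as the generated delete command shows.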