diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 8af35ea492..bdd95ecce2 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-b95c2c6e21bec9551ec7d7d51ddf2dfe390b4522
\ No newline at end of file
+f2843dd06e095a39dda2d454a97ceaf6767a2bf3
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index ef9e69f122..df4740a1f1 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -11,7 +11,8 @@ cmd/account/enable-ip-access-lists/enable-ip-access-lists.go linguist-generated=
 cmd/account/encryption-keys/encryption-keys.go linguist-generated=true
 cmd/account/esm-enablement-account/esm-enablement-account.go linguist-generated=true
 cmd/account/federation-policy/federation-policy.go linguist-generated=true
-cmd/account/groups/groups.go linguist-generated=true
+cmd/account/groups-v2/groups-v2.go linguist-generated=true
+cmd/account/iam-v2/iam-v2.go linguist-generated=true
 cmd/account/ip-access-lists/ip-access-lists.go linguist-generated=true
 cmd/account/llm-proxy-partner-powered-account/llm-proxy-partner-powered-account.go linguist-generated=true
 cmd/account/llm-proxy-partner-powered-enforce/llm-proxy-partner-powered-enforce.go linguist-generated=true
@@ -27,13 +28,13 @@ cmd/account/private-access/private-access.go linguist-generated=true
 cmd/account/published-app-integration/published-app-integration.go linguist-generated=true
 cmd/account/service-principal-federation-policy/service-principal-federation-policy.go linguist-generated=true
 cmd/account/service-principal-secrets/service-principal-secrets.go linguist-generated=true
-cmd/account/service-principals/service-principals.go linguist-generated=true
+cmd/account/service-principals-v2/service-principals-v2.go linguist-generated=true
 cmd/account/settings-v2/settings-v2.go linguist-generated=true
 cmd/account/settings/settings.go linguist-generated=true
 cmd/account/storage-credentials/storage-credentials.go linguist-generated=true
 cmd/account/storage/storage.go linguist-generated=true
 cmd/account/usage-dashboards/usage-dashboards.go linguist-generated=true
-cmd/account/users/users.go linguist-generated=true
+cmd/account/users-v2/users-v2.go linguist-generated=true
 cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true
 cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true
 cmd/account/workspace-network-configuration/workspace-network-configuration.go linguist-generated=true
@@ -86,6 +87,7 @@ cmd/workspace/experiments/experiments.go linguist-generated=true
 cmd/workspace/external-lineage/external-lineage.go linguist-generated=true
 cmd/workspace/external-locations/external-locations.go linguist-generated=true
 cmd/workspace/external-metadata/external-metadata.go linguist-generated=true
+cmd/workspace/feature-engineering/feature-engineering.go linguist-generated=true
 cmd/workspace/feature-store/feature-store.go linguist-generated=true
 cmd/workspace/forecasting/forecasting.go linguist-generated=true
 cmd/workspace/functions/functions.go linguist-generated=true
@@ -93,7 +95,7 @@ cmd/workspace/genie/genie.go linguist-generated=true
 cmd/workspace/git-credentials/git-credentials.go linguist-generated=true
 cmd/workspace/global-init-scripts/global-init-scripts.go linguist-generated=true
 cmd/workspace/grants/grants.go linguist-generated=true
-cmd/workspace/groups/groups.go linguist-generated=true
+cmd/workspace/groups-v2/groups-v2.go linguist-generated=true
 cmd/workspace/instance-pools/instance-pools.go linguist-generated=true
 cmd/workspace/instance-profiles/instance-profiles.go linguist-generated=true
 cmd/workspace/ip-access-lists/ip-access-lists.go linguist-generated=true
@@ -142,7 +144,7 @@ cmd/workspace/rfa/rfa.go linguist-generated=true
 cmd/workspace/schemas/schemas.go linguist-generated=true
 cmd/workspace/secrets/secrets.go linguist-generated=true
 cmd/workspace/service-principal-secrets-proxy/service-principal-secrets-proxy.go linguist-generated=true
-cmd/workspace/service-principals/service-principals.go linguist-generated=true
+cmd/workspace/service-principals-v2/service-principals-v2.go linguist-generated=true
 cmd/workspace/serving-endpoints/serving-endpoints.go linguist-generated=true
 cmd/workspace/settings/settings.go linguist-generated=true
 cmd/workspace/shares/shares.go linguist-generated=true
@@ -156,12 +158,13 @@ cmd/workspace/temporary-path-credentials/temporary-path-credentials.go linguist-
 cmd/workspace/temporary-table-credentials/temporary-table-credentials.go linguist-generated=true
 cmd/workspace/token-management/token-management.go linguist-generated=true
 cmd/workspace/tokens/tokens.go linguist-generated=true
-cmd/workspace/users/users.go linguist-generated=true
+cmd/workspace/users-v2/users-v2.go linguist-generated=true
 cmd/workspace/vector-search-endpoints/vector-search-endpoints.go linguist-generated=true
 cmd/workspace/vector-search-indexes/vector-search-indexes.go linguist-generated=true
 cmd/workspace/volumes/volumes.go linguist-generated=true
 cmd/workspace/warehouses/warehouses.go linguist-generated=true
 cmd/workspace/workspace-bindings/workspace-bindings.go linguist-generated=true
 cmd/workspace/workspace-conf/workspace-conf.go linguist-generated=true
+cmd/workspace/workspace-iam-v2/workspace-iam-v2.go linguist-generated=true
 cmd/workspace/workspace-settings-v2/workspace-settings-v2.go linguist-generated=true
 cmd/workspace/workspace/workspace.go linguist-generated=true
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 4b8ed3d9c6..0f140a23a9 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -14,3 +14,7 @@
 * Introduce new bundle variable: `${workspace.current_user.domain_friendly_name}` ([#3623](https://github.com/databricks/cli/pull/3623))
 
 ### API Changes
+* Added `databricks account account-groups-v2` command group.
+* Added `databricks account account-iam-v2` command group.
+* Added `databricks feature-engineering` command group.
+* Added `databricks shares list-shares` command.
diff --git a/acceptance/bundle/refschema/out.fields.txt b/acceptance/bundle/refschema/out.fields.txt
index db84dc8b40..513ecea1f1 100644
--- a/acceptance/bundle/refschema/out.fields.txt
+++ b/acceptance/bundle/refschema/out.fields.txt
@@ -162,6 +162,7 @@ resources.database_instances.*.child_instance_refs[*].name string ALL
 resources.database_instances.*.child_instance_refs[*].uid string ALL
 resources.database_instances.*.creation_time string ALL
 resources.database_instances.*.creator string ALL
+resources.database_instances.*.effective_capacity string ALL
 resources.database_instances.*.effective_enable_pg_native_login bool ALL
 resources.database_instances.*.effective_enable_readable_secondaries bool ALL
 resources.database_instances.*.effective_node_count int ALL
@@ -230,6 +231,8 @@ resources.jobs.*.environments[*].spec.dependencies[*] string INPUT STATE
 resources.jobs.*.environments[*].spec.environment_version string INPUT STATE
 resources.jobs.*.environments[*].spec.jar_dependencies []string INPUT STATE
 resources.jobs.*.environments[*].spec.jar_dependencies[*] string INPUT STATE
+resources.jobs.*.environments[*].spec.java_dependencies []string INPUT STATE
+resources.jobs.*.environments[*].spec.java_dependencies[*] string INPUT STATE
 resources.jobs.*.format jobs.Format INPUT STATE
 resources.jobs.*.git_source *jobs.GitSource INPUT STATE
 resources.jobs.*.git_source.git_branch string INPUT STATE
@@ -419,6 +422,8 @@ resources.jobs.*.settings.environments[*].spec.dependencies[*] string REMOTE
 resources.jobs.*.settings.environments[*].spec.environment_version string REMOTE
 resources.jobs.*.settings.environments[*].spec.jar_dependencies []string REMOTE
 resources.jobs.*.settings.environments[*].spec.jar_dependencies[*] string REMOTE
+resources.jobs.*.settings.environments[*].spec.java_dependencies []string REMOTE
+resources.jobs.*.settings.environments[*].spec.java_dependencies[*] string REMOTE
 resources.jobs.*.settings.format jobs.Format REMOTE
 resources.jobs.*.settings.git_source *jobs.GitSource REMOTE
 resources.jobs.*.settings.git_source.git_branch string REMOTE
@@ -607,6 +612,7 @@ resources.jobs.*.settings.tasks[*].depends_on[*].outcome string REMOTE
 resources.jobs.*.settings.tasks[*].depends_on[*].task_key string REMOTE
 resources.jobs.*.settings.tasks[*].description string REMOTE
 resources.jobs.*.settings.tasks[*].disable_auto_optimization bool REMOTE
+resources.jobs.*.settings.tasks[*].disabled bool REMOTE
 resources.jobs.*.settings.tasks[*].email_notifications *jobs.TaskEmailNotifications REMOTE
 resources.jobs.*.settings.tasks[*].email_notifications.no_alert_for_skipped_runs bool REMOTE
 resources.jobs.*.settings.tasks[*].email_notifications.on_duration_warning_threshold_exceeded []string REMOTE
@@ -666,6 +672,7 @@ resources.jobs.*.settings.tasks[*].for_each_task.task.depends_on[*].outcome string REMOTE
 resources.jobs.*.settings.tasks[*].for_each_task.task.depends_on[*].task_key string REMOTE
 resources.jobs.*.settings.tasks[*].for_each_task.task.description string REMOTE
 resources.jobs.*.settings.tasks[*].for_each_task.task.disable_auto_optimization bool REMOTE
+resources.jobs.*.settings.tasks[*].for_each_task.task.disabled bool REMOTE
 resources.jobs.*.settings.tasks[*].for_each_task.task.email_notifications *jobs.TaskEmailNotifications REMOTE
 resources.jobs.*.settings.tasks[*].for_each_task.task.email_notifications.no_alert_for_skipped_runs bool REMOTE
 resources.jobs.*.settings.tasks[*].for_each_task.task.email_notifications.on_duration_warning_threshold_exceeded []string REMOTE
@@ -1276,6 +1283,7 @@ resources.jobs.*.tasks[*].depends_on[*].outcome string INPUT STATE
 resources.jobs.*.tasks[*].depends_on[*].task_key string INPUT STATE
 resources.jobs.*.tasks[*].description string INPUT STATE
 resources.jobs.*.tasks[*].disable_auto_optimization bool INPUT STATE
+resources.jobs.*.tasks[*].disabled bool INPUT STATE
 resources.jobs.*.tasks[*].email_notifications *jobs.TaskEmailNotifications INPUT STATE
 resources.jobs.*.tasks[*].email_notifications.no_alert_for_skipped_runs bool INPUT STATE
 resources.jobs.*.tasks[*].email_notifications.on_duration_warning_threshold_exceeded []string INPUT STATE
@@ -1335,6 +1343,7 @@ resources.jobs.*.tasks[*].for_each_task.task.depends_on[*].outcome string INPUT STATE
 resources.jobs.*.tasks[*].for_each_task.task.depends_on[*].task_key string INPUT STATE
 resources.jobs.*.tasks[*].for_each_task.task.description string INPUT STATE
 resources.jobs.*.tasks[*].for_each_task.task.disable_auto_optimization bool INPUT STATE
+resources.jobs.*.tasks[*].for_each_task.task.disabled bool INPUT STATE
 resources.jobs.*.tasks[*].for_each_task.task.email_notifications *jobs.TaskEmailNotifications INPUT STATE
 resources.jobs.*.tasks[*].for_each_task.task.email_notifications.no_alert_for_skipped_runs bool INPUT STATE
 resources.jobs.*.tasks[*].for_each_task.task.email_notifications.on_duration_warning_threshold_exceeded []string INPUT STATE
@@ -2034,6 +2043,7 @@ resources.pipelines.*.id string INPUT STATE
 resources.pipelines.*.ingestion_definition *pipelines.IngestionPipelineDefinition INPUT STATE
 resources.pipelines.*.ingestion_definition.connection_name string INPUT STATE
 resources.pipelines.*.ingestion_definition.ingestion_gateway_id string INPUT STATE
+resources.pipelines.*.ingestion_definition.netsuite_jar_path string INPUT STATE
 resources.pipelines.*.ingestion_definition.objects []pipelines.IngestionConfig INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*] pipelines.IngestionConfig INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].report *pipelines.ReportSpec INPUT STATE
@@ -2057,6 +2067,14 @@ resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration
 resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.scd_type pipelines.TableSpecificConfigScdType INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.sequence_by []string INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.sequence_by[*] string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters *pipelines.IngestionPipelineDefinitionWorkdayReportParameters INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.incremental bool INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.parameters map[string]string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.parameters.* string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.report_parameters []pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.report_parameters[*] pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.report_parameters[*].key string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.report_parameters[*].value string INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].schema *pipelines.SchemaSpec INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].schema.destination_catalog string INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].schema.destination_schema string INPUT STATE
@@ -2078,6 +2096,14 @@ resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration
 resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.scd_type pipelines.TableSpecificConfigScdType INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.sequence_by []string INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.sequence_by[*] string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters *pipelines.IngestionPipelineDefinitionWorkdayReportParameters INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.incremental bool INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.parameters map[string]string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.parameters.* string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.report_parameters []pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.report_parameters[*] pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.report_parameters[*].key string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.report_parameters[*].value string INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].table *pipelines.TableSpec INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].table.destination_catalog string INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].table.destination_schema string INPUT STATE
@@ -2101,6 +2127,14 @@ resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.
 resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.scd_type pipelines.TableSpecificConfigScdType INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.sequence_by []string INPUT STATE
 resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.sequence_by[*] string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters *pipelines.IngestionPipelineDefinitionWorkdayReportParameters INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.incremental bool INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.parameters map[string]string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.parameters.* string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.report_parameters []pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.report_parameters[*] pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.report_parameters[*].key string INPUT STATE
+resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.report_parameters[*].value string INPUT STATE
 resources.pipelines.*.ingestion_definition.source_configurations []pipelines.SourceConfig INPUT STATE
 resources.pipelines.*.ingestion_definition.source_configurations[*] pipelines.SourceConfig INPUT STATE
 resources.pipelines.*.ingestion_definition.source_configurations[*].catalog *pipelines.SourceCatalogConfig INPUT STATE
@@ -2126,6 +2160,14 @@ resources.pipelines.*.ingestion_definition.table_configuration.salesforce_includ
 resources.pipelines.*.ingestion_definition.table_configuration.scd_type pipelines.TableSpecificConfigScdType INPUT STATE
 resources.pipelines.*.ingestion_definition.table_configuration.sequence_by []string INPUT STATE
 resources.pipelines.*.ingestion_definition.table_configuration.sequence_by[*] string INPUT STATE
+resources.pipelines.*.ingestion_definition.table_configuration.workday_report_parameters *pipelines.IngestionPipelineDefinitionWorkdayReportParameters INPUT STATE
+resources.pipelines.*.ingestion_definition.table_configuration.workday_report_parameters.incremental bool INPUT STATE
+resources.pipelines.*.ingestion_definition.table_configuration.workday_report_parameters.parameters map[string]string INPUT STATE
+resources.pipelines.*.ingestion_definition.table_configuration.workday_report_parameters.parameters.* string INPUT STATE
+resources.pipelines.*.ingestion_definition.table_configuration.workday_report_parameters.report_parameters []pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue INPUT STATE
+resources.pipelines.*.ingestion_definition.table_configuration.workday_report_parameters.report_parameters[*] pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue INPUT STATE
+resources.pipelines.*.ingestion_definition.table_configuration.workday_report_parameters.report_parameters[*].key string INPUT STATE
+resources.pipelines.*.ingestion_definition.table_configuration.workday_report_parameters.report_parameters[*].value string INPUT STATE
 resources.pipelines.*.last_modified int64 REMOTE
 resources.pipelines.*.latest_updates []pipelines.UpdateStateInfo REMOTE
 resources.pipelines.*.latest_updates[*] pipelines.UpdateStateInfo REMOTE
@@ -2295,6 +2337,7 @@ resources.pipelines.*.spec.id string REMOTE
 resources.pipelines.*.spec.ingestion_definition *pipelines.IngestionPipelineDefinition REMOTE
 resources.pipelines.*.spec.ingestion_definition.connection_name string REMOTE
 resources.pipelines.*.spec.ingestion_definition.ingestion_gateway_id string REMOTE
+resources.pipelines.*.spec.ingestion_definition.netsuite_jar_path string REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects []pipelines.IngestionConfig REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*] pipelines.IngestionConfig REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].report *pipelines.ReportSpec REMOTE
@@ -2318,6 +2361,14 @@ resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configur
 resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configuration.scd_type pipelines.TableSpecificConfigScdType REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configuration.sequence_by []string REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configuration.sequence_by[*] string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters *pipelines.IngestionPipelineDefinitionWorkdayReportParameters REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.incremental bool REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.parameters map[string]string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.parameters.* string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.report_parameters []pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.report_parameters[*] pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.report_parameters[*].key string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].report.table_configuration.workday_report_parameters.report_parameters[*].value string REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].schema *pipelines.SchemaSpec REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].schema.destination_catalog string REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].schema.destination_schema string REMOTE
@@ -2339,6 +2390,14 @@ resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configur
 resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configuration.scd_type pipelines.TableSpecificConfigScdType REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configuration.sequence_by []string REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configuration.sequence_by[*] string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters *pipelines.IngestionPipelineDefinitionWorkdayReportParameters REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.incremental bool REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.parameters map[string]string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.parameters.* string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.report_parameters []pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.report_parameters[*] pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.report_parameters[*].key string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].schema.table_configuration.workday_report_parameters.report_parameters[*].value string REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].table *pipelines.TableSpec REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].table.destination_catalog string REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].table.destination_schema string REMOTE
@@ -2362,6 +2421,14 @@ resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configura
 resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configuration.scd_type pipelines.TableSpecificConfigScdType REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configuration.sequence_by []string REMOTE
 resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configuration.sequence_by[*] string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters *pipelines.IngestionPipelineDefinitionWorkdayReportParameters REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.incremental bool REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.parameters map[string]string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.parameters.* string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.report_parameters []pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.report_parameters[*] pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.report_parameters[*].key string REMOTE
+resources.pipelines.*.spec.ingestion_definition.objects[*].table.table_configuration.workday_report_parameters.report_parameters[*].value string REMOTE
 resources.pipelines.*.spec.ingestion_definition.source_configurations []pipelines.SourceConfig REMOTE
 resources.pipelines.*.spec.ingestion_definition.source_configurations[*] pipelines.SourceConfig REMOTE
 resources.pipelines.*.spec.ingestion_definition.source_configurations[*].catalog *pipelines.SourceCatalogConfig REMOTE
@@ -2387,6 +2454,14 @@ resources.pipelines.*.spec.ingestion_definition.table_configuration.salesforce_i
 resources.pipelines.*.spec.ingestion_definition.table_configuration.scd_type pipelines.TableSpecificConfigScdType REMOTE
 resources.pipelines.*.spec.ingestion_definition.table_configuration.sequence_by []string REMOTE
 resources.pipelines.*.spec.ingestion_definition.table_configuration.sequence_by[*] string REMOTE
+resources.pipelines.*.spec.ingestion_definition.table_configuration.workday_report_parameters *pipelines.IngestionPipelineDefinitionWorkdayReportParameters REMOTE
+resources.pipelines.*.spec.ingestion_definition.table_configuration.workday_report_parameters.incremental bool REMOTE
+resources.pipelines.*.spec.ingestion_definition.table_configuration.workday_report_parameters.parameters map[string]string REMOTE
+resources.pipelines.*.spec.ingestion_definition.table_configuration.workday_report_parameters.parameters.* string REMOTE
+resources.pipelines.*.spec.ingestion_definition.table_configuration.workday_report_parameters.report_parameters []pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue REMOTE
+resources.pipelines.*.spec.ingestion_definition.table_configuration.workday_report_parameters.report_parameters[*] pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue REMOTE
+resources.pipelines.*.spec.ingestion_definition.table_configuration.workday_report_parameters.report_parameters[*].key string REMOTE
+resources.pipelines.*.spec.ingestion_definition.table_configuration.workday_report_parameters.report_parameters[*].value string REMOTE
 resources.pipelines.*.spec.libraries []pipelines.PipelineLibrary REMOTE
 resources.pipelines.*.spec.libraries[*] pipelines.PipelineLibrary REMOTE
 resources.pipelines.*.spec.libraries[*].file *pipelines.FileLibrary REMOTE
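The new `tasks[*].disabled` rows above come from a private-preview `disabled` flag on job tasks (described later in this diff). A minimal Go sketch of how the flag might be set through the SDK types that back this schema — the `Disabled` field name is an assumption derived from the `disabled` JSON tag, and the flag is marked PRIVATE preview, so treat this as illustrative only:

```go
package main

import (
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/jobs"
)

func main() {
	// A task that stays defined in the job but is flagged as disabled.
	// Disabled is assumed to map to the new `disabled` field (PRIVATE preview).
	task := jobs.Task{
		TaskKey:  "nightly_refresh",
		Disabled: true, // per the schema: the task will not run even though it is part of the job
	}
	fmt.Printf("%+v\n", task)
}
```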
diff --git a/acceptance/cmd/account/account-help/output.txt b/acceptance/cmd/account/account-help/output.txt
index 49b600e26b..007525f3e9 100644
--- a/acceptance/cmd/account/account-help/output.txt
+++ b/acceptance/cmd/account/account-help/output.txt
@@ -8,8 +8,11 @@ Usage:
 Identity and Access Management
   access-control         These APIs manage access rules on resources in an account.
   groups                 Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
+  groups-v2              Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
   service-principals     Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
+  service-principals-v2  Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
   users                  User identities recognized by Databricks and represented by email addresses.
+  users-v2               User identities recognized by Databricks and represented by email addresses.
   workspace-assignment   The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
 
 Unity Catalog
@@ -24,6 +27,9 @@ Settings
   settings                         Accounts Settings API allows users to manage settings at the account level.
   workspace-network-configuration  These APIs allow configuration of network settings for Databricks workspaces by selecting which network policy to associate with the workspace.
+
+Settings (v2)
+  settings-v2                      APIs to manage account level settings.
 
 Provisioning
   credentials       These APIs manage credential configurations for this workspace.
   encryption-keys   These APIs manage encryption key configurations for this workspace (optional).
@@ -48,6 +54,9 @@ OAuth
   service-principal-federation-policy  These APIs manage service principal federation policies.
   service-principal-secrets            These APIs enable administrators to manage service principal secrets.
+
+Identity and Access Management (v2)
+  iam-v2                               These APIs are used to manage identities and the workspace access of these identities in .
 
 Flags:
   -h, --help   help for account
diff --git a/acceptance/cmd/workspace/database/update-database-instance/output.txt b/acceptance/cmd/workspace/database/update-database-instance/output.txt
index 8772dca783..9be04cd49d 100644
--- a/acceptance/cmd/workspace/database/update-database-instance/output.txt
+++ b/acceptance/cmd/workspace/database/update-database-instance/output.txt
@@ -7,13 +7,13 @@ Usage:
 
 Flags:
       --capacity string                The sku of the instance.
-      --enable-pg-native-login         Whether the instance has PG native password login enabled.
+      --enable-pg-native-login         Whether to enable PG native password login on the instance.
      --enable-readable-secondaries    Whether to enable secondaries to serve read-only traffic.
   -h, --help                           help for update-database-instance
       --json JSON                      either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
       --node-count int                 The number of nodes in the instance, composed of 1 primary and 0 or more secondaries.
       --retention-window-in-days int   The retention window for the instance.
-      --stopped                        Whether the instance is stopped.
+      --stopped                        Whether to stop the instance.
 
 Global Flags:
       --debug   enable debug logging
diff --git a/acceptance/help/output.txt b/acceptance/help/output.txt
index 6c454de344..4e80539a7e 100644
--- a/acceptance/help/output.txt
+++ b/acceptance/help/output.txt
@@ -37,9 +37,12 @@ Real-time Serving
 Identity and Access Management
   current-user           This API allows retrieving information about currently authenticated user or service principal.
   groups                 Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.
+  groups-v2              Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.
   permissions            Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints.
   service-principals     Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
+  service-principals-v2  Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
   users                  User identities recognized by Databricks and represented by email addresses.
+  users-v2               User identities recognized by Databricks and represented by email addresses.
 
 Databricks SQL
   alerts                 The alerts API can be used to perform CRUD operations on alerts.
@@ -142,6 +145,9 @@ Quality Monitor v2
 OAuth
   service-principal-secrets-proxy  These APIs enable administrators to manage service principal secrets at the workspace level.
 
+Identity and Access Management (v2)
+  workspace-iam-v2                 These APIs are used to manage identities and the workspace access of these identities in .
+
 Additional Commands:
   account                Databricks Account Commands
   api                    Perform Databricks API call
@@ -150,8 +156,9 @@ Additional Commands:
   configure              Configure authentication
   help                   Help about any command
   labs                   Manage Databricks Labs installations
-  tag-policies           The Tag Policy API allows you to manage tag policies in Databricks.
+  tag-policies           The Tag Policy API allows you to manage policies for governed tags in Databricks.
   version                Retrieve information about the current version of this CLI
+  workspace-settings-v2  APIs to manage workspace level settings.
 
 Flags:
       --debug   enable debug logging
diff --git a/bundle/config/variable/resolve_service_principal.go b/bundle/config/variable/resolve_service_principal.go
index 03b8e30892..c7b299ccaa 100644
--- a/bundle/config/variable/resolve_service_principal.go
+++ b/bundle/config/variable/resolve_service_principal.go
@@ -11,6 +11,7 @@ type resolveServicePrincipal struct {
 }
 
 func (l resolveServicePrincipal) Resolve(ctx context.Context, w *databricks.WorkspaceClient) (string, error) {
+	//nolint:staticcheck // this API is deprecated but we still need use it as there is no replacement yet.
 	entity, err := w.ServicePrincipals.GetByDisplayName(ctx, l.name)
 	if err != nil {
 		return "", err
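The `//nolint:staticcheck` directive added above suppresses the deprecation warning (staticcheck's SA1019) for that single call, rather than for the whole file. For reference, the general shape of the pattern — the function names here are hypothetical; only the directive syntax itself is real:

```go
package main

import "fmt"

// Deprecated: use a newer lookup API instead (hypothetical example).
func OldLookup(name string) string { return "id-for-" + name }

func main() {
	// staticcheck flags calls to deprecated identifiers; a nolint directive
	// placed on the line above scopes the suppression to just this call,
	// mirroring the pattern used in the diff.
	//nolint:staticcheck // deprecated, but there is no replacement yet
	id := OldLookup("my-sp")
	fmt.Println(id)
}
```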
diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml
index 08d072226f..09b28d1586 100644
--- a/bundle/internal/schema/annotations.yml
+++ b/bundle/internal/schema/annotations.yml
@@ -478,6 +478,10 @@ github.com/databricks/cli/bundle/config/resources.DashboardPermission:
   "user_name":
     "description": |-
       PLACEHOLDER
+github.com/databricks/cli/bundle/config/resources.DatabaseInstance:
+  "effective_capacity":
+    "description": |-
+      PLACEHOLDER
 github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermission:
   "group_name":
     "description": |-
       PLACEHOLDER
diff --git a/bundle/internal/schema/annotations_openapi.yml b/bundle/internal/schema/annotations_openapi.yml
index 6fe5e32bbe..e0f6063a8b 100644
--- a/bundle/internal/schema/annotations_openapi.yml
+++ b/bundle/internal/schema/annotations_openapi.yml
@@ -356,7 +356,7 @@ github.com/databricks/cli/bundle/config/resources.DatabaseInstance:
       true
   "enable_pg_native_login":
     "description": |-
-      Whether the instance has PG native password login enabled. Defaults to true.
+      Whether to enable PG native password login on the instance. Defaults to false.
   "enable_readable_secondaries":
     "description": |-
       Whether to enable secondaries to serve read-only traffic. Defaults to false.
@@ -366,7 +366,7 @@
   "node_count":
     "description": |-
       The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults to
-      1 primary and 0 secondaries.
+      1 primary and 0 secondaries. This field is input only, see effective_node_count for the output.
   "parent_instance_ref":
     "description": |-
       The ref of the parent instance. This is only available if the instance is
@@ -401,7 +401,7 @@
       true
   "stopped":
     "description": |-
-      Whether the instance is stopped.
+      Whether to stop the instance. An input only param, see effective_stopped for the output.
   "uid":
     "description": |-
       An immutable UUID identifier for the instance.
@@ -659,8 +659,6 @@ github.com/databricks/cli/bundle/config/resources.Pipeline:
       Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
 
       Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown.
-    "x-databricks-preview": |-
-      PRIVATE
   "schema":
     "description": |-
       The default schema (database) where tables are read from or published to.
@@ -1758,6 +1756,13 @@ github.com/databricks/databricks-sdk-go/service/compute.Environment:
       Each version comes with a specific Python version and a set of Python packages. The version is a string, consisting of an integer.
   "jar_dependencies":
+    "description": |-
+      Use `java_dependencies` instead.
+    "deprecation_message": |-
+      This field is deprecated
+    "x-databricks-preview": |-
+      PRIVATE
+  "java_dependencies":
     "description": |-
       List of jar dependencies, should be string representing volume paths. For example: `/Volumes/path/to/test.jar`.
     "x-databricks-preview": |-
       PRIVATE
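For SDK users the rename is additive: `jar_dependencies` still round-trips but is deprecated in favor of `java_dependencies`, with identical semantics (volume paths to jar files). A hedged sketch using the SDK's `compute.Environment` type — the `JavaDependencies` Go field name is an assumption based on the new JSON tag, and both fields are PRIVATE preview:

```go
package main

import (
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/compute"
)

func main() {
	// Before: JarDependencies (now deprecated). After: JavaDependencies.
	// Both carry volume paths to jars, as the description above states.
	env := compute.Environment{
		EnvironmentVersion: "2",
		JavaDependencies:   []string{"/Volumes/path/to/test.jar"}, // assumed field name
	}
	fmt.Printf("%+v\n", env)
}
```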
@@ -2033,9 +2038,6 @@ github.com/databricks/databricks-sdk-go/service/database.DatabaseInstanceRef:
       Output: Only populated if provided as input to create a child instance.
   "effective_lsn":
     "description": |-
-      xref AIP-129. `lsn` is owned by the client, while `effective_lsn` is owned by the server.
-      `lsn` will only be set in Create/Update response messages if and only if the user provides the field via the request.
-      `effective_lsn` on the other hand will always bet set in all response messages (Create/Update/Get/List).
       For a parent ref instance, this is the LSN on the parent instance from which the
       instance was created.
       For a child ref instance, this is the LSN on the instance from which the child instance
@@ -3233,7 +3235,7 @@ github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfigura
 github.com/databricks/databricks-sdk-go/service/jobs.Task:
   "clean_rooms_notebook_task":
     "description": |-
-      The task runs a [clean rooms](https://docs.databricks.com/en/clean-rooms/index.html) notebook
+      The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook
       when the `clean_rooms_notebook_task` field is present.
   "condition_task":
     "description": |-
@@ -3265,6 +3267,11 @@ github.com/databricks/databricks-sdk-go/service/jobs.Task:
   "disable_auto_optimization":
     "description": |-
       An option to disable auto optimization in serverless
+  "disabled":
+    "description": |-
+      An optional flag to disable the task. If set to true, the task will not run even if it is part of a job.
+    "x-databricks-preview": |-
+      PRIVATE
   "email_notifications":
     "description": |-
       An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.
@@ -3342,15 +3349,9 @@ github.com/databricks/databricks-sdk-go/service/jobs.Task:
       The task runs a Python file when the `spark_python_task` field is present.
   "spark_submit_task":
     "description": |-
-      (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute.
-
-      In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations.
-
-      `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters.
-
-      By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage.
-
-      The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.
+      (Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).
+    "deprecation_message": |-
+      This field is deprecated
   "sql_task":
     "description": |-
       The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present.
@@ -3571,6 +3572,13 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefin
   "ingestion_gateway_id":
     "description": |-
       Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.
+  "netsuite_jar_path":
+    "description": |-
+      Netsuite only configuration. When the field is set for a netsuite connector,
+      the jar stored in the field will be validated and added to the classpath of
+      pipeline's cluster.
+    "x-databricks-preview": |-
+      PRIVATE
   "objects":
     "description": |-
       Required. Settings specifying tables to replicate and the destination for the replicated tables.
@@ -3626,6 +3634,40 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefin
       This field is mutable and can be updated without triggering a full snapshot.
     "x-databricks-preview": |-
       PRIVATE
+github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParameters:
+  "incremental":
+    "description": |-
+      (Optional) Marks the report as incremental.
+      This field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now
+      controlled by the `parameters` field.
+    "deprecation_message": |-
+      This field is deprecated
+  "parameters":
+    "description": |-
+      Parameters for the Workday report. Each key represents the parameter name (e.g., "start_date", "end_date"),
+      and the corresponding value is a SQL-like expression used to compute the parameter value at runtime.
+      Example:
+      {
+        "start_date": "{ coalesce(current_offset(), date(\"2025-02-01\")) }",
+        "end_date": "{ current_date() - INTERVAL 1 DAY }"
+      }
+  "report_parameters":
+    "description": |-
+      (Optional) Additional custom parameters for Workday Report
+      This field is deprecated and should not be used. Use `parameters` instead.
+    "deprecation_message": |-
+      This field is deprecated
+github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue:
+  "key":
+    "description": |-
+      Key for the report parameter, can be a column name or other metadata
+  "value":
+    "description": |-
+      Value for the report parameter.
+      Possible values it can take are these sql functions:
+      1. coalesce(current_offset(), date("YYYY-MM-DD")) -> if current_offset() is null, then the passed date, else current_offset()
+      2. current_date()
+      3. date_sub(current_date(), x) -> subtract x (some non-negative integer) days from current date
 github.com/databricks/databricks-sdk-go/service/pipelines.IngestionSourceType:
   "_":
     "enum":
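The `parameters` map replaces both the `incremental` flag and the `report_parameters` key/value list: each key is a Workday report parameter name and each value is one of the SQL-like expressions enumerated above. A sketch of constructing the new shape with the SDK's pipelines types — the Go field and type names are assumed from the JSON tags, and the whole type is PRIVATE preview:

```go
package main

import (
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/pipelines"
)

func main() {
	// The replacement `parameters` map; values are evaluated at runtime.
	// coalesce(current_offset(), date(...)) gives incremental behavior
	// without the deprecated `incremental` flag.
	params := pipelines.IngestionPipelineDefinitionWorkdayReportParameters{
		Parameters: map[string]string{
			"start_date": `{ coalesce(current_offset(), date("2025-02-01")) }`,
			"end_date":   `{ current_date() - INTERVAL 1 DAY }`,
		},
	}
	fmt.Printf("%+v\n", params)
}
```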
@@ -3665,6 +3707,8 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionSourceType:
         CONFLUENCE
       - |-
         META_MARKETING
+      - |-
+        FOREIGN_CATALOG
 github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger: {}
 github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary:
   "path":
@@ -4017,6 +4061,11 @@ github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig:
   "sequence_by":
     "description": |-
       The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.
+  "workday_report_parameters":
+    "description": |-
+      (Optional) Additional custom parameters for Workday Report
+    "x-databricks-preview": |-
+      PRIVATE
 github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScdType:
   "_":
     "description": |-
diff --git a/bundle/internal/validation/generated/enum_fields.go b/bundle/internal/validation/generated/enum_fields.go
index 11b93c00ca..2f24e03a4c 100644
--- a/bundle/internal/validation/generated/enum_fields.go
+++ b/bundle/internal/validation/generated/enum_fields.go
@@ -119,7 +119,7 @@ var EnumFields = map[string][]string{
 	"resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.scd_type": {"APPEND_ONLY", "SCD_TYPE_1", "SCD_TYPE_2"},
 	"resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.scd_type": {"APPEND_ONLY", "SCD_TYPE_1", "SCD_TYPE_2"},
 	"resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.scd_type": {"APPEND_ONLY", "SCD_TYPE_1", "SCD_TYPE_2"},
-	"resources.pipelines.*.ingestion_definition.source_type": {"BIGQUERY", "CONFLUENCE", "DYNAMICS365", "GA4_RAW_DATA", "MANAGED_POSTGRESQL", "META_MARKETING", "MYSQL", "NETSUITE", "ORACLE", "POSTGRESQL", "REDSHIFT", "SALESFORCE", "SERVICENOW", "SHAREPOINT", "SQLDW", "SQLSERVER", "TERADATA", "WORKDAY_RAAS"},
+	"resources.pipelines.*.ingestion_definition.source_type": {"BIGQUERY", "CONFLUENCE", "DYNAMICS365", "FOREIGN_CATALOG", "GA4_RAW_DATA", "MANAGED_POSTGRESQL", "META_MARKETING", "MYSQL", "NETSUITE", "ORACLE", "POSTGRESQL", "REDSHIFT", "SALESFORCE", "SERVICENOW", "SHAREPOINT", "SQLDW", "SQLSERVER", "TERADATA", "WORKDAY_RAAS"},
 	"resources.pipelines.*.ingestion_definition.table_configuration.scd_type": {"APPEND_ONLY", "SCD_TYPE_1", "SCD_TYPE_2"},
 	"resources.pipelines.*.restart_window.days_of_week[*]": {"FRIDAY", "MONDAY", "SATURDAY", "SUNDAY", "THURSDAY", "TUESDAY", "WEDNESDAY"},
diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json
index 172d2f4226..6a64910fcd 100644
--- a/bundle/schema/jsonschema.json
+++ b/bundle/schema/jsonschema.json
@@ -606,6 +606,9 @@
         "$ref": "#/$defs/string",
         "doNotSuggest": true
       },
+      "effective_capacity": {
+        "$ref": "#/$defs/string"
+      },
       "effective_enable_pg_native_login": {
         "description": "xref AIP-129. `enable_pg_native_login` is owned by the client, while `effective_enable_pg_native_login` is owned by the server.\n`enable_pg_native_login` will only be set in Create/Update response messages if and only if the user provides the field via the request.\n`effective_enable_pg_native_login` on the other hand will always bet set in all response messages (Create/Update/Get/List).",
         "$ref": "#/$defs/bool",
@@ -632,7 +635,7 @@
         "doNotSuggest": true
       },
       "enable_pg_native_login": {
-        "description": "Whether the instance has PG native password login enabled. Defaults to true.",
+        "description": "Whether to enable PG native password login on the instance. Defaults to false.",
         "$ref": "#/$defs/bool"
       },
       "enable_readable_secondaries": {
@@ -648,7 +651,7 @@
         "$ref": "#/$defs/string"
       },
       "node_count": {
-        "description": "The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults to\n1 primary and 0 secondaries.",
+        "description": "The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults to\n1 primary and 0 secondaries. This field is input only, see effective_node_count for the output.",
         "$ref": "#/$defs/int"
       },
       "parent_instance_ref": {
@@ -683,7 +686,7 @@
         "doNotSuggest": true
       },
       "stopped": {
-        "description": "Whether the instance is stopped.",
+        "description": "Whether to stop the instance. An input only param, see effective_stopped for the output.",
         "$ref": "#/$defs/bool"
       },
       "uid": {
@@ -1371,9 +1374,7 @@
         "$ref": "#/$defs/string"
       },
       "run_as": {
-        "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RunAs",
-        "x-databricks-preview": "PRIVATE",
-        "doNotSuggest": true
+        "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RunAs"
       },
       "schema": {
         "description": "The default schema (database) where tables are read from or published to.",
@@ -4031,6 +4032,14 @@
         "$ref": "#/$defs/string"
       },
       "jar_dependencies": {
+        "description": "Use `java_dependencies` instead.",
+        "$ref": "#/$defs/slice/string",
+        "x-databricks-preview": "PRIVATE",
+        "deprecationMessage": "This field is deprecated",
+        "doNotSuggest": true,
+        "deprecated": true
+      },
+      "java_dependencies": {
         "description": "List of jar dependencies, should be string representing volume paths. For example: `/Volumes/path/to/test.jar`.",
         "$ref": "#/$defs/slice/string",
         "x-databricks-preview": "PRIVATE",
@@ -4496,7 +4505,7 @@
         "$ref": "#/$defs/string"
       },
       "effective_lsn": {
-        "description": "xref AIP-129. `lsn` is owned by the client, while `effective_lsn` is owned by the server.\n`lsn` will only be set in Create/Update response messages if and only if the user provides the field via the request.\n`effective_lsn` on the other hand will always bet set in all response messages (Create/Update/Get/List).\nFor a parent ref instance, this is the LSN on the parent instance from which the\ninstance was created.\nFor a child ref instance, this is the LSN on the instance from which the child instance\nwas created.",
+        "description": "For a parent ref instance, this is the LSN on the parent instance from which the\ninstance was created.\nFor a child ref instance, this is the LSN on the instance from which the child instance\nwas created.",
         "$ref": "#/$defs/string",
         "doNotSuggest": true
       },
@@ -6564,7 +6573,7 @@
       "type": "object",
       "properties": {
         "clean_rooms_notebook_task": {
-          "description": "The task runs a [clean rooms](https://docs.databricks.com/en/clean-rooms/index.html) notebook\nwhen the `clean_rooms_notebook_task` field is present.",
+          "description": "The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook\nwhen the `clean_rooms_notebook_task` field is present.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CleanRoomsNotebookTask"
         },
         "condition_task": {
@@ -6604,6 +6613,12 @@
           "description": "An option to disable auto optimization in serverless",
           "$ref": "#/$defs/bool"
         },
+        "disabled": {
+          "description": "An optional flag to disable the task. If set to true, the task will not run even if it is part of a job.",
+          "$ref": "#/$defs/bool",
+          "x-databricks-preview": "PRIVATE",
+          "doNotSuggest": true
+        },
         "email_notifications": {
           "description": "An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskEmailNotifications"
         },
@@ -6689,8 +6704,10 @@
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask"
         },
         "spark_submit_task": {
-          "description": "(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute.\n\nIn the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations.\n\n`master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters.\n\nBy default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage.\n\nThe `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask"
+          "description": "(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).",
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask",
+          "deprecationMessage": "This field is deprecated",
+          "deprecated": true
         },
         "sql_task": {
           "description": "The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present.",
@@ -7169,6 +7186,12 @@
           "description": "Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.",
           "$ref": "#/$defs/string"
         },
+        "netsuite_jar_path": {
+          "description": "Netsuite only configuration. When the field is set for a netsuite connector,\nthe jar stored in the field will be validated and added to the classpath of\npipeline's cluster.",
+          "$ref": "#/$defs/string",
+          "x-databricks-preview": "PRIVATE",
+          "doNotSuggest": true
+        },
         "objects": {
           "description": "Required. Settings specifying tables to replicate and the destination for the replicated tables.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig"
         },
@@ -7230,6 +7253,58 @@
       }
     ]
   },
+  "pipelines.IngestionPipelineDefinitionWorkdayReportParameters": {
+    "oneOf": [
+      {
+        "type": "object",
+        "properties": {
+          "incremental": {
+            "description": "(Optional) Marks the report as incremental.\nThis field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now\ncontrolled by the `parameters` field.",
+            "$ref": "#/$defs/bool",
+            "deprecationMessage": "This field is deprecated",
+            "deprecated": true
+          },
+          "parameters": {
+            "description": "Parameters for the Workday report. Each key represents the parameter name (e.g., \"start_date\", \"end_date\"),\nand the corresponding value is a SQL-like expression used to compute the parameter value at runtime.\nExample:\n{\n\"start_date\": \"{ coalesce(current_offset(), date(\\\"2025-02-01\\\")) }\",\n\"end_date\": \"{ current_date() - INTERVAL 1 DAY }\"\n}",
+            "$ref": "#/$defs/map/string"
+          },
+          "report_parameters": {
+            "description": "(Optional) Additional custom parameters for Workday Report\nThis field is deprecated and should not be used. Use `parameters` instead.",
+            "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue",
+            "deprecationMessage": "This field is deprecated",
+            "deprecated": true
+          }
+        },
+        "additionalProperties": false
+      },
+      {
+        "type": "string",
+        "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
+      }
+    ]
+  },
+  "pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue": {
+    "oneOf": [
+      {
+        "type": "object",
+        "properties": {
+          "key": {
+            "description": "Key for the report parameter, can be a column name or other metadata",
+            "$ref": "#/$defs/string"
+          },
+          "value": {
+            "description": "Value for the report parameter.\nPossible values it can take are these sql functions:\n1. coalesce(current_offset(), date(\"YYYY-MM-DD\")) -\u003e if current_offset() is null, then the passed date, else current_offset()\n2. current_date()\n3. date_sub(current_date(), x) -\u003e subtract x (some non-negative integer) days from current date",
+            "$ref": "#/$defs/string"
+          }
+        },
+        "additionalProperties": false
+      },
+      {
+        "type": "string",
+        "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
+      }
+    ]
+  },
 "pipelines.IngestionSourceType": {
     "oneOf": [
       {
@@ -7252,7 +7327,8 @@
         "SHAREPOINT",
         "DYNAMICS365",
         "CONFLUENCE",
-        "META_MARKETING"
+        "META_MARKETING",
+        "FOREIGN_CATALOG"
         ]
       },
       {
@@ -7885,6 +7961,12 @@
         "sequence_by": {
           "description": "The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.",
           "$ref": "#/$defs/slice/string"
+        },
+        "workday_report_parameters": {
+          "description": "(Optional) Additional custom parameters for Workday Report",
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParameters",
+          "x-databricks-preview": "PRIVATE",
+          "doNotSuggest": true
         }
       },
       "additionalProperties": false
@@ -10110,6 +10192,20 @@
       }
     ]
   },
+  "pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue": {
+    "oneOf": [
+      {
+        "type": "array",
+        "items": {
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue"
+        }
+      },
+      {
+        "type": "string",
+        "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
+      }
+    ]
+  },
   "pipelines.Notifications": {
     "oneOf": [
       {
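Every new type above gets the same `oneOf` escape hatch: either the structured object or a string matching the bundle's variable-interpolation pattern. That pattern can be exercised in isolation; this standalone Go check copies it verbatim from the schema:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Pattern copied from the jsonschema `oneOf` string alternative above.
	pattern := regexp.MustCompile(`\$\{(var(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)+)\}`)

	for _, s := range []string{
		"${var.workday_params}", // matches: simple variable reference
		"${var.reports[0]}",     // matches: indexed reference
		"${workspace.host}",     // no match: only `var` references are allowed here
	} {
		fmt.Printf("%-24s -> %v\n", s, pattern.MatchString(s))
	}
}
```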
It diff --git a/cmd/account/cmd.go b/cmd/account/cmd.go index bdbab94761..d5f27f2b8f 100644 --- a/cmd/account/cmd.go +++ b/cmd/account/cmd.go @@ -3,7 +3,6 @@ package account import ( - "github.com/databricks/cli/libs/cmdgroup" "github.com/spf13/cobra" account_access_control "github.com/databricks/cli/cmd/account/access-control" @@ -14,7 +13,8 @@ import ( custom_app_integration "github.com/databricks/cli/cmd/account/custom-app-integration" encryption_keys "github.com/databricks/cli/cmd/account/encryption-keys" account_federation_policy "github.com/databricks/cli/cmd/account/federation-policy" - account_groups "github.com/databricks/cli/cmd/account/groups" + account_groups_v2 "github.com/databricks/cli/cmd/account/groups-v2" + account_iam_v2 "github.com/databricks/cli/cmd/account/iam-v2" account_ip_access_lists "github.com/databricks/cli/cmd/account/ip-access-lists" log_delivery "github.com/databricks/cli/cmd/account/log-delivery" account_metastore_assignments "github.com/databricks/cli/cmd/account/metastore-assignments" @@ -27,17 +27,21 @@ import ( published_app_integration "github.com/databricks/cli/cmd/account/published-app-integration" service_principal_federation_policy "github.com/databricks/cli/cmd/account/service-principal-federation-policy" service_principal_secrets "github.com/databricks/cli/cmd/account/service-principal-secrets" - account_service_principals "github.com/databricks/cli/cmd/account/service-principals" + account_service_principals_v2 "github.com/databricks/cli/cmd/account/service-principals-v2" account_settings "github.com/databricks/cli/cmd/account/settings" account_settings_v2 "github.com/databricks/cli/cmd/account/settings-v2" storage "github.com/databricks/cli/cmd/account/storage" account_storage_credentials "github.com/databricks/cli/cmd/account/storage-credentials" usage_dashboards "github.com/databricks/cli/cmd/account/usage-dashboards" - account_users "github.com/databricks/cli/cmd/account/users" + account_users_v2 "github.com/databricks/cli/cmd/account/users-v2" vpc_endpoints "github.com/databricks/cli/cmd/account/vpc-endpoints" workspace_assignment "github.com/databricks/cli/cmd/account/workspace-assignment" workspace_network_configuration "github.com/databricks/cli/cmd/account/workspace-network-configuration" workspaces "github.com/databricks/cli/cmd/account/workspaces" + + account_groups "github.com/databricks/cli/cmd/account/groups" + account_service_principals "github.com/databricks/cli/cmd/account/service-principals" + account_users "github.com/databricks/cli/cmd/account/users" ) func New() *cobra.Command { @@ -53,7 +57,7 @@ func New() *cobra.Command { cmd.AddCommand(custom_app_integration.New()) cmd.AddCommand(encryption_keys.New()) cmd.AddCommand(account_federation_policy.New()) - cmd.AddCommand(account_groups.New()) + cmd.AddCommand(account_groups_v2.New()) cmd.AddCommand(account_ip_access_lists.New()) cmd.AddCommand(log_delivery.New()) cmd.AddCommand(account_metastore_assignments.New()) @@ -66,25 +70,28 @@ func New() *cobra.Command { cmd.AddCommand(published_app_integration.New()) cmd.AddCommand(service_principal_federation_policy.New()) cmd.AddCommand(service_principal_secrets.New()) - cmd.AddCommand(account_service_principals.New()) + cmd.AddCommand(account_service_principals_v2.New()) cmd.AddCommand(account_settings.New()) cmd.AddCommand(account_settings_v2.New()) cmd.AddCommand(storage.New()) cmd.AddCommand(account_storage_credentials.New()) cmd.AddCommand(usage_dashboards.New()) - cmd.AddCommand(account_users.New()) + 
cmd.AddCommand(account_users_v2.New()) cmd.AddCommand(vpc_endpoints.New()) cmd.AddCommand(workspace_assignment.New()) cmd.AddCommand(workspace_network_configuration.New()) cmd.AddCommand(workspaces.New()) + cmd.AddCommand(account_iam_v2.New()) cmd.AddCommand(budgets.New()) - // Add account command groups, filtering out empty groups or groups with only hidden commands. - allGroups := Groups() - allCommands := cmd.Commands() - filteredGroups := cmdgroup.FilterGroups(allGroups, allCommands) - for i := range filteredGroups { - cmd.AddGroup(&filteredGroups[i]) + cmd.AddCommand(account_groups.New()) + cmd.AddCommand(account_service_principals.New()) + cmd.AddCommand(account_users.New()) + + // Register all groups with the parent command. + groups := Groups() + for i := range groups { + cmd.AddGroup(&groups[i]) } return cmd diff --git a/cmd/account/groups-v2/groups-v2.go b/cmd/account/groups-v2/groups-v2.go new file mode 100755 index 0000000000..a5f79859b3 --- /dev/null +++ b/cmd/account/groups-v2/groups-v2.go @@ -0,0 +1,448 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package groups_v2 + +import ( + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "groups-v2", + Short: `Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.`, + Long: `Groups simplify identity management, making it easier to assign access to + Databricks account, data, and other securable objects. + + It is best practice to assign access to workspaces and access-control policies + in Unity Catalog to groups, instead of to users individually. All Databricks + account identities can be assigned as members of groups, and members inherit + permissions that are assigned to their group.`, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newCreate()) + cmd.AddCommand(newDelete()) + cmd.AddCommand(newGet()) + cmd.AddCommand(newList()) + cmd.AddCommand(newPatch()) + cmd.AddCommand(newUpdate()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
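+
+// Editor's note (illustrative, not generated code): the subcommands registered
+// above are reached through the account-level CLI. Assuming the usual binary
+// name `databricks`, typical invocations look like:
+//
+//	databricks account groups-v2 list
+//	databricks account groups-v2 get <group-id>
+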
+var createOverrides []func( + *cobra.Command, + *iam.CreateAccountGroupRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.CreateAccountGroupRequest + var createJson flags.JsonFlag + + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a human-readable group name.`) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks group ID.`) + // TODO: array: members + // TODO: complex arg: meta + // TODO: array: roles + + cmd.Use = "create" + cmd.Short = `Create a new group.` + cmd.Long = `Create a new group. + + Creates a group in the Databricks account with a unique name, using the + supplied group details.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createJson.Unmarshal(&createReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + + response, err := a.GroupsV2.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteAccountGroupRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteAccountGroupRequest + + cmd.Use = "delete ID" + cmd.Short = `Delete a group.` + cmd.Long = `Delete a group. + + Deletes a group from the Databricks account. + + Arguments: + ID: Unique ID for a group in the Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + deleteReq.Id = args[0] + + err = a.GroupsV2.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
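+
+// Editor's note (illustrative sketch, not generated code): with the flag and
+// JSON plumbing stripped away, the create and delete commands above reduce to
+// two SDK calls on the account client; the display name and ID values here are
+// assumed examples.
+//
+//	a := cmdctx.AccountClient(ctx)
+//	response, err := a.GroupsV2.Create(ctx, iam.CreateAccountGroupRequest{
+//		DisplayName: "data-engineers",
+//	})
+//	// ... and later, by ID:
+//	err = a.GroupsV2.Delete(ctx, iam.DeleteAccountGroupRequest{Id: "<group-id>"})
+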
+var getOverrides []func( + *cobra.Command, + *iam.GetAccountGroupRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetAccountGroupRequest + + cmd.Use = "get ID" + cmd.Short = `Get group details.` + cmd.Long = `Get group details. + + Gets the information for a specific group in the Databricks account. + + Arguments: + ID: Unique ID for a group in the Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + getReq.Id = args[0] + + response, err := a.GroupsV2.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *iam.ListAccountGroupsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListAccountGroupsRequest + + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().Int64Var(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results. Supported values: [ascending, descending]`) + cmd.Flags().Int64Var(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + + cmd.Use = "list" + cmd.Short = `List group details.` + cmd.Long = `List group details. + + Gets all details of the groups associated with the Databricks account. As of + 08/22/2025, this endpoint will not return members. Instead, members should be + retrieved by iterating through Get group details.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + response := a.GroupsV2.List(ctx, listReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +// start patch command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PatchAccountGroupRequest, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PatchAccountGroupRequest + var patchJson flags.JsonFlag + + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: Operations + // TODO: array: schemas + + cmd.Use = "patch ID" + cmd.Short = `Update group details.` + cmd.Long = `Update group details. + + Partially updates the details of a group. + + Arguments: + ID: Unique ID for a group in the Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := patchJson.Unmarshal(&patchReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + patchReq.Id = args[0] + + err = a.GroupsV2.Patch(ctx, patchReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *iam.UpdateAccountGroupRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.UpdateAccountGroupRequest + var updateJson flags.JsonFlag + + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a human-readable group name.`) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + // TODO: array: members + // TODO: complex arg: meta + // TODO: array: roles + + cmd.Use = "update ID" + cmd.Short = `Replace a group.` + cmd.Long = `Replace a group. + + Updates the details of a group by replacing the entire group entity.
+ + Arguments: + ID: Databricks group ID` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateJson.Unmarshal(&updateReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + updateReq.Id = args[0] + + err = a.GroupsV2.Update(ctx, updateReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +// end service AccountGroupsV2 diff --git a/cmd/account/groups.go b/cmd/account/groups.go index 841c7736ad..e616b5ee81 100644 --- a/cmd/account/groups.go +++ b/cmd/account/groups.go @@ -34,5 +34,9 @@ func Groups() []cobra.Group { ID: "oauth2", Title: "OAuth", }, + { + ID: "iamv2", + Title: "Identity and Access Management (v2)", + }, } } diff --git a/cmd/account/iam-v2/iam-v2.go b/cmd/account/iam-v2/iam-v2.go new file mode 100755 index 0000000000..528f8efcc9 --- /dev/null +++ b/cmd/account/iam-v2/iam-v2.go @@ -0,0 +1,367 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package iam_v2 + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/iamv2" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "iam-v2", + Short: `These APIs are used to manage identities and the workspace access of these identities in .`, + Long: `These APIs are used to manage identities and the workspace access of these + identities in .`, + GroupID: "iamv2", + Annotations: map[string]string{ + "package": "iamv2", + }, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newGetWorkspaceAccessDetail()) + cmd.AddCommand(newResolveGroup()) + cmd.AddCommand(newResolveServicePrincipal()) + cmd.AddCommand(newResolveUser()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start get-workspace-access-detail command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getWorkspaceAccessDetailOverrides []func( + *cobra.Command, + *iamv2.GetWorkspaceAccessDetailRequest, +) + +func newGetWorkspaceAccessDetail() *cobra.Command { + cmd := &cobra.Command{} + + var getWorkspaceAccessDetailReq iamv2.GetWorkspaceAccessDetailRequest + + cmd.Flags().Var(&getWorkspaceAccessDetailReq.View, "view", `Controls what fields are returned. 
Supported values: [BASIC, FULL]`) + + cmd.Use = "get-workspace-access-detail WORKSPACE_ID PRINCIPAL_ID" + cmd.Short = `Get workspace access details for a principal.` + cmd.Long = `Get workspace access details for a principal. + + Returns the access details for a principal in a workspace. Allows for checking + access details for any provisioned principal (user, service principal, or + group) in a workspace. * Provisioned principal here refers to one that has + been synced into Databricks from the customer's IdP or added explicitly to + Databricks via SCIM/UI. Allows for passing in a "view" parameter to control + what fields are returned (BASIC by default or FULL). + + Arguments: + WORKSPACE_ID: Required. The workspace ID for which the access details are being + requested. + PRINCIPAL_ID: Required. The internal ID of the principal (user/sp/group) for which the + access details are being requested.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + _, err = fmt.Sscan(args[0], &getWorkspaceAccessDetailReq.WorkspaceId) + if err != nil { + return fmt.Errorf("invalid WORKSPACE_ID: %s", args[0]) + } + _, err = fmt.Sscan(args[1], &getWorkspaceAccessDetailReq.PrincipalId) + if err != nil { + return fmt.Errorf("invalid PRINCIPAL_ID: %s", args[1]) + } + + response, err := a.IamV2.GetWorkspaceAccessDetail(ctx, getWorkspaceAccessDetailReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getWorkspaceAccessDetailOverrides { + fn(cmd, &getWorkspaceAccessDetailReq) + } + + return cmd +} + +// start resolve-group command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var resolveGroupOverrides []func( + *cobra.Command, + *iamv2.ResolveGroupRequest, +) + +func newResolveGroup() *cobra.Command { + cmd := &cobra.Command{} + + var resolveGroupReq iamv2.ResolveGroupRequest + var resolveGroupJson flags.JsonFlag + + cmd.Flags().Var(&resolveGroupJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "resolve-group EXTERNAL_ID" + cmd.Short = `Resolve an external group in the Databricks account.` + cmd.Long = `Resolve an external group in the Databricks account. + + Resolves a group with the given external ID from the customer's IdP. If the + group does not exist, it will be created in the account. If the customer is + not onboarded onto Automatic Identity Management (AIM), this will return an + error. + + Arguments: + EXTERNAL_ID: Required. The external ID of the group in the customer's IdP.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(0)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, no positional arguments are required. 
Provide 'external_id' in your JSON input") + } + return nil + } + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := resolveGroupJson.Unmarshal(&resolveGroupReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + if !cmd.Flags().Changed("json") { + resolveGroupReq.ExternalId = args[0] + } + + response, err := a.IamV2.ResolveGroup(ctx, resolveGroupReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range resolveGroupOverrides { + fn(cmd, &resolveGroupReq) + } + + return cmd +} + +// start resolve-service-principal command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var resolveServicePrincipalOverrides []func( + *cobra.Command, + *iamv2.ResolveServicePrincipalRequest, +) + +func newResolveServicePrincipal() *cobra.Command { + cmd := &cobra.Command{} + + var resolveServicePrincipalReq iamv2.ResolveServicePrincipalRequest + var resolveServicePrincipalJson flags.JsonFlag + + cmd.Flags().Var(&resolveServicePrincipalJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "resolve-service-principal EXTERNAL_ID" + cmd.Short = `Resolve an external service principal in the Databricks account.` + cmd.Long = `Resolve an external service principal in the Databricks account. + + Resolves an SP with the given external ID from the customer's IdP. If the SP + does not exist, it will be created. If the customer is not onboarded onto + Automatic Identity Management (AIM), this will return an error. + + Arguments: + EXTERNAL_ID: Required. The external ID of the service principal in the customer's IdP.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(0)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'external_id' in your JSON input") + } + return nil + } + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := resolveServicePrincipalJson.Unmarshal(&resolveServicePrincipalReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + if !cmd.Flags().Changed("json") { + resolveServicePrincipalReq.ExternalId = args[0] + } + + response, err := a.IamV2.ResolveServicePrincipal(ctx, resolveServicePrincipalReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
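+
+	// Editor's note (illustrative, not generated code): each resolve-* command
+	// takes the external ID positionally or, equivalently, via --json, e.g.:
+	//
+	//	databricks account iam-v2 resolve-group <external-id>
+	//	databricks account iam-v2 resolve-group --json '{"external_id": "<external-id>"}'
+	//
+	// Both forms require the account to be onboarded onto Automatic Identity
+	// Management (AIM), as the command help above notes.
+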
+ cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range resolveServicePrincipalOverrides { + fn(cmd, &resolveServicePrincipalReq) + } + + return cmd +} + +// start resolve-user command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var resolveUserOverrides []func( + *cobra.Command, + *iamv2.ResolveUserRequest, +) + +func newResolveUser() *cobra.Command { + cmd := &cobra.Command{} + + var resolveUserReq iamv2.ResolveUserRequest + var resolveUserJson flags.JsonFlag + + cmd.Flags().Var(&resolveUserJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "resolve-user EXTERNAL_ID" + cmd.Short = `Resolve an external user in the Databricks account.` + cmd.Long = `Resolve an external user in the Databricks account. + + Resolves a user with the given external ID from the customer's IdP. If the + user does not exist, it will be created. If the customer is not onboarded onto + Automatic Identity Management (AIM), this will return an error. + + Arguments: + EXTERNAL_ID: Required. The external ID of the user in the customer's IdP.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(0)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'external_id' in your JSON input") + } + return nil + } + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := resolveUserJson.Unmarshal(&resolveUserReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + if !cmd.Flags().Changed("json") { + resolveUserReq.ExternalId = args[0] + } + + response, err := a.IamV2.ResolveUser(ctx, resolveUserReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range resolveUserOverrides { + fn(cmd, &resolveUserReq) + } + + return cmd +} + +// end service account_iamV2 diff --git a/cmd/account/service-principals-v2/service-principals-v2.go b/cmd/account/service-principals-v2/service-principals-v2.go new file mode 100755 index 0000000000..512ecaf204 --- /dev/null +++ b/cmd/account/service-principals-v2/service-principals-v2.go @@ -0,0 +1,448 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package service_principals_v2 + +import ( + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "service-principals-v2", + Short: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.`, + Long: `Identities for use with jobs, automated tools, and systems such as scripts, + apps, and CI/CD platforms. Databricks recommends creating service principals + to run production jobs or modify production data. If all processes that act on + production data run with service principals, interactive users do not need any + write, delete, or modify privileges in production. This eliminates the risk of + a user overwriting production data by accident.`, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newCreate()) + cmd.AddCommand(newDelete()) + cmd.AddCommand(newGet()) + cmd.AddCommand(newList()) + cmd.AddCommand(newPatch()) + cmd.AddCommand(newUpdate()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *iam.CreateAccountServicePrincipalRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.CreateAccountServicePrincipalRequest + var createJson flags.JsonFlag + + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.ApplicationId, "application-id", createReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks service principal ID.`) + // TODO: array: roles + + cmd.Use = "create" + cmd.Short = `Create a service principal.` + cmd.Long = `Create a service principal. + + Creates a new service principal in the Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createJson.Unmarshal(&createReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + + response, err := a.ServicePrincipalsV2.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
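+
+	// Editor's note (illustrative, not generated code): a minimal invocation of
+	// the create command defined above, using its registered flags; the display
+	// name is an assumed example value:
+	//
+	//	databricks account service-principals-v2 create --display-name "ci-deployer"
+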
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteAccountServicePrincipalRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteAccountServicePrincipalRequest + + cmd.Use = "delete ID" + cmd.Short = `Delete a service principal.` + cmd.Long = `Delete a service principal. + + Delete a single service principal in the Databricks account. + + Arguments: + ID: Unique ID for a service principal in the Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + deleteReq.Id = args[0] + + err = a.ServicePrincipalsV2.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetAccountServicePrincipalRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetAccountServicePrincipalRequest + + cmd.Use = "get ID" + cmd.Short = `Get service principal details.` + cmd.Long = `Get service principal details. + + Gets the details for a single service principal defined in the Databricks + account. + + Arguments: + ID: Unique ID for a service principal in the Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + getReq.Id = args[0] + + response, err := a.ServicePrincipalsV2.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory.
+var listOverrides []func( + *cobra.Command, + *iam.ListAccountServicePrincipalsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListAccountServicePrincipalsRequest + + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().Int64Var(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results. Supported values: [ascending, descending]`) + cmd.Flags().Int64Var(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + + cmd.Use = "list" + cmd.Short = `List service principals.` + cmd.Long = `List service principals. + + Gets the set of service principals associated with a Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + response := a.ServicePrincipalsV2.List(ctx, listReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +// start patch command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PatchAccountServicePrincipalRequest, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PatchAccountServicePrincipalRequest + var patchJson flags.JsonFlag + + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: Operations + // TODO: array: schemas + + cmd.Use = "patch ID" + cmd.Short = `Update service principal details.` + cmd.Long = `Update service principal details. + + Partially updates the details of a single service principal in the Databricks + account. 
+ + Arguments: + ID: Unique ID for a service principal in the Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := patchJson.Unmarshal(&patchReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + patchReq.Id = args[0] + + err = a.ServicePrincipalsV2.Patch(ctx, patchReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *iam.UpdateAccountServicePrincipalRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.UpdateAccountServicePrincipalRequest + var updateJson flags.JsonFlag + + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&updateReq.ApplicationId, "application-id", updateReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + // TODO: array: roles + + cmd.Use = "update ID" + cmd.Short = `Replace service principal.` + cmd.Long = `Replace service principal. + + Updates the details of a single service principal. + + This action replaces the existing service principal with the same name. + + Arguments: + ID: Databricks service principal ID.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateJson.Unmarshal(&updateReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + updateReq.Id = args[0] + + err = a.ServicePrincipalsV2.Update(ctx, updateReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command.
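+
+	// Editor's note (illustrative, not generated code): patch applies SCIM-style
+	// operations supplied through --json, whereas update replaces the entity.
+	// A hypothetical patch body (the Operations/schemas field names come from
+	// the TODO markers above; the URN is the standard SCIM 2.0 PatchOp schema):
+	//
+	//	databricks account service-principals-v2 patch <id> --json '{
+	//	    "schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
+	//	    "Operations": [{"op": "replace", "path": "active", "value": false}]
+	//	}'
+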
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +// end service AccountServicePrincipalsV2 diff --git a/cmd/account/settings-v2/settings-v2.go b/cmd/account/settings-v2/settings-v2.go index 2f2a5fe307..2811187749 100755 --- a/cmd/account/settings-v2/settings-v2.go +++ b/cmd/account/settings-v2/settings-v2.go @@ -24,10 +24,7 @@ func New() *cobra.Command { Annotations: map[string]string{ "package": "settingsv2", }, - - // This service is being previewed; hide from help output. - Hidden: true, - RunE: root.ReportUnknownSubcommand, + RunE: root.ReportUnknownSubcommand, } // Add methods @@ -61,7 +58,9 @@ func newGetPublicAccountSetting() *cobra.Command { cmd.Short = `Get an account setting.` cmd.Long = `Get an account setting. - Get a setting value at account level` + Get a setting value at account level. See + :method:settingsv2/listaccountsettingsmetadata for list of setting available + via public APIs at account level.` cmd.Annotations = make(map[string]string) @@ -117,12 +116,9 @@ func newListAccountSettingsMetadata() *cobra.Command { cmd.Short = `List valid setting keys and their metadata.` cmd.Long = `List valid setting keys and their metadata. - List valid setting keys and metadata. These settings are available to - referenced via [GET - /api/2.1/settings/{name}](#~1api~1account~1settingsv2~1getpublicaccountsetting) - and [PATCH - /api/2.1/settings/{name}](#~1api~1account~1settingsv2~patchpublicaccountsetting) - APIs` + List valid setting keys and metadata. These settings are available to be + referenced via GET :method:settingsv2/getpublicaccountsetting and PATCH + :method:settingsv2/patchpublicaccountsetting APIs` cmd.Annotations = make(map[string]string) @@ -174,12 +170,10 @@ func newPatchPublicAccountSetting() *cobra.Command { // TODO: complex arg: aibi_dashboard_embedding_approved_domains // TODO: complex arg: automatic_cluster_update_workspace // TODO: complex arg: boolean_val - // TODO: complex arg: default_data_security_mode // TODO: complex arg: effective_aibi_dashboard_embedding_access_policy // TODO: complex arg: effective_aibi_dashboard_embedding_approved_domains // TODO: complex arg: effective_automatic_cluster_update_workspace // TODO: complex arg: effective_boolean_val - // TODO: complex arg: effective_default_data_security_mode // TODO: complex arg: effective_integer_val // TODO: complex arg: effective_personal_compute // TODO: complex arg: effective_restrict_workspace_admins @@ -194,7 +188,9 @@ func newPatchPublicAccountSetting() *cobra.Command { cmd.Short = `Update an account setting.` cmd.Long = `Update an account setting. - Patch a setting value at account level` + Patch a setting value at account level. See + :method:settingsv2/listaccountsettingsmetadata for list of setting available + via public APIs at account level.` cmd.Annotations = make(map[string]string) diff --git a/cmd/account/users-v2/users-v2.go b/cmd/account/users-v2/users-v2.go new file mode 100755 index 0000000000..7a44950b6d --- /dev/null +++ b/cmd/account/users-v2/users-v2.go @@ -0,0 +1,464 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package users_v2 + +import ( + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "users-v2", + Short: `User identities recognized by Databricks and represented by email addresses.`, + Long: `User identities recognized by Databricks and represented by email addresses. + + Databricks recommends using SCIM provisioning to sync users and groups + automatically from your identity provider to your Databricks account. SCIM + streamlines onboarding a new employee or team by using your identity provider + to create users and groups in Databricks account and give them the proper + level of access. When a user leaves your organization or no longer needs + access to Databricks account, admins can terminate the user in your identity + provider and that user’s account will also be removed from Databricks + account. This ensures a consistent offboarding process and prevents + unauthorized users from accessing sensitive data.`, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newCreate()) + cmd.AddCommand(newDelete()) + cmd.AddCommand(newGet()) + cmd.AddCommand(newList()) + cmd.AddCommand(newPatch()) + cmd.AddCommand(newUpdate()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *iam.CreateAccountUserRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.CreateAccountUserRequest + var createJson flags.JsonFlag + + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + // TODO: array: emails + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, `External ID is not currently supported.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks user ID.`) + // TODO: complex arg: name + // TODO: array: roles + cmd.Flags().StringVar(&createReq.UserName, "user-name", createReq.UserName, `Email address of the Databricks user.`) + + cmd.Use = "create" + cmd.Short = `Create a new user.` + cmd.Long = `Create a new user. + + Creates a new user in the Databricks account. 
` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createJson.Unmarshal(&createReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + + response, err := a.UsersV2.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteAccountUserRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteAccountUserRequest + + cmd.Use = "delete ID" + cmd.Short = `Delete a user.` + cmd.Long = `Delete a user. + + Deletes a user. Deleting a user from a Databricks account also removes objects + associated with the user. + + Arguments: + ID: Unique ID for a user in the Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + deleteReq.Id = args[0] + + err = a.UsersV2.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory.
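+
+// Editor's note (illustrative, not generated code): since user-name is the
+// user's email address, a minimal create is simply (address assumed):
+//
+//	databricks account users-v2 create --user-name "new.hire@example.com"
+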
+var getOverrides []func( + *cobra.Command, + *iam.GetAccountUserRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetAccountUserRequest + + cmd.Flags().StringVar(&getReq.Attributes, "attributes", getReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&getReq.Count, "count", getReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&getReq.ExcludedAttributes, "excluded-attributes", getReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&getReq.Filter, "filter", getReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&getReq.SortBy, "sort-by", getReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&getReq.SortOrder, "sort-order", `The order to sort the results. Supported values: [ascending, descending]`) + cmd.Flags().IntVar(&getReq.StartIndex, "start-index", getReq.StartIndex, `Specifies the index of the first result.`) + + cmd.Use = "get ID" + cmd.Short = `Get user details.` + cmd.Long = `Get user details. + + Gets information for a specific user in the Databricks account. + + Arguments: + ID: Unique ID for a user in the Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + getReq.Id = args[0] + + response, err := a.UsersV2.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *iam.ListAccountUsersRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListAccountUsersRequest + + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().Int64Var(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results. Supported values: [ascending, descending]`) + cmd.Flags().Int64Var(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + + cmd.Use = "list" + cmd.Short = `List users.` + cmd.Long = `List users.
+ + Gets details for all the users associated with a Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + response := a.UsersV2.List(ctx, listReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +// start patch command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PatchAccountUserRequest, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PatchAccountUserRequest + var patchJson flags.JsonFlag + + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: Operations + // TODO: array: schemas + + cmd.Use = "patch ID" + cmd.Short = `Update user details.` + cmd.Long = `Update user details. + + Partially updates a user resource by applying the supplied operations on + specific user attributes. + + Arguments: + ID: Unique ID for a user in the Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := patchJson.Unmarshal(&patchReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + patchReq.Id = args[0] + + err = a.UsersV2.Patch(ctx, patchReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory.
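+
+// Editor's note (illustrative sketch, not generated code): List returns a
+// paging iterator that cmdio.RenderIterator drains; outside the CLI the same
+// iterator can be collected with the SDK's listing helpers (this assumes the
+// github.com/databricks/databricks-sdk-go/listing package; the SCIM filter
+// value is an example):
+//
+//	it := a.UsersV2.List(ctx, iam.ListAccountUsersRequest{
+//		Filter: `userName eq "new.hire@example.com"`,
+//	})
+//	users, err := listing.ToSlice(ctx, it)
+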
+var updateOverrides []func( + *cobra.Command, + *iam.UpdateAccountUserRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.UpdateAccountUserRequest + var updateJson flags.JsonFlag + + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) + // TODO: array: emails + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, `External ID is not currently supported.`) + // TODO: complex arg: name + // TODO: array: roles + cmd.Flags().StringVar(&updateReq.UserName, "user-name", updateReq.UserName, `Email address of the Databricks user.`) + + cmd.Use = "update ID" + cmd.Short = `Replace a user.` + cmd.Long = `Replace a user. + + Replaces a user's information with the data supplied in request. + + Arguments: + ID: Databricks user ID.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := cmdctx.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateJson.Unmarshal(&updateReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + updateReq.Id = args[0] + + err = a.UsersV2.Update(ctx, updateReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +// end service AccountUsersV2 diff --git a/cmd/workspace/alerts-v2/alerts-v2.go b/cmd/workspace/alerts-v2/alerts-v2.go index 87a0d0f70c..a02196ff0e 100755 --- a/cmd/workspace/alerts-v2/alerts-v2.go +++ b/cmd/workspace/alerts-v2/alerts-v2.go @@ -255,8 +255,8 @@ func newTrashAlert() *cobra.Command { var trashAlertReq sql.TrashAlertV2Request cmd.Use = "trash-alert ID" - cmd.Short = `Delete an alert.` - cmd.Long = `Delete an alert. + cmd.Short = `Delete an alert (legacy TrashAlert).` + cmd.Long = `Delete an alert (legacy TrashAlert). Moves an alert to the trash. Trashed alerts immediately disappear from list views, and can no longer trigger. 
You can restore a trashed alert through the diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go index 5cbc87dccc..8be495af25 100755 --- a/cmd/workspace/cmd.go +++ b/cmd/workspace/cmd.go @@ -37,6 +37,7 @@ import ( external_lineage "github.com/databricks/cli/cmd/workspace/external-lineage" external_locations "github.com/databricks/cli/cmd/workspace/external-locations" external_metadata "github.com/databricks/cli/cmd/workspace/external-metadata" + feature_engineering "github.com/databricks/cli/cmd/workspace/feature-engineering" feature_store "github.com/databricks/cli/cmd/workspace/feature-store" forecasting "github.com/databricks/cli/cmd/workspace/forecasting" functions "github.com/databricks/cli/cmd/workspace/functions" @@ -44,7 +45,7 @@ import ( git_credentials "github.com/databricks/cli/cmd/workspace/git-credentials" global_init_scripts "github.com/databricks/cli/cmd/workspace/global-init-scripts" grants "github.com/databricks/cli/cmd/workspace/grants" - groups "github.com/databricks/cli/cmd/workspace/groups" + groups_v2 "github.com/databricks/cli/cmd/workspace/groups-v2" instance_pools "github.com/databricks/cli/cmd/workspace/instance-pools" instance_profiles "github.com/databricks/cli/cmd/workspace/instance-profiles" ip_access_lists "github.com/databricks/cli/cmd/workspace/ip-access-lists" @@ -91,7 +92,7 @@ import ( schemas "github.com/databricks/cli/cmd/workspace/schemas" secrets "github.com/databricks/cli/cmd/workspace/secrets" service_principal_secrets_proxy "github.com/databricks/cli/cmd/workspace/service-principal-secrets-proxy" - service_principals "github.com/databricks/cli/cmd/workspace/service-principals" + service_principals_v2 "github.com/databricks/cli/cmd/workspace/service-principals-v2" serving_endpoints "github.com/databricks/cli/cmd/workspace/serving-endpoints" settings "github.com/databricks/cli/cmd/workspace/settings" shares "github.com/databricks/cli/cmd/workspace/shares" @@ -104,7 +105,7 @@ import ( temporary_table_credentials "github.com/databricks/cli/cmd/workspace/temporary-table-credentials" token_management "github.com/databricks/cli/cmd/workspace/token-management" tokens "github.com/databricks/cli/cmd/workspace/tokens" - users "github.com/databricks/cli/cmd/workspace/users" + users_v2 "github.com/databricks/cli/cmd/workspace/users-v2" vector_search_endpoints "github.com/databricks/cli/cmd/workspace/vector-search-endpoints" vector_search_indexes "github.com/databricks/cli/cmd/workspace/vector-search-indexes" volumes "github.com/databricks/cli/cmd/workspace/volumes" @@ -112,8 +113,13 @@ import ( workspace "github.com/databricks/cli/cmd/workspace/workspace" workspace_bindings "github.com/databricks/cli/cmd/workspace/workspace-bindings" workspace_conf "github.com/databricks/cli/cmd/workspace/workspace-conf" + workspace_iam_v2 "github.com/databricks/cli/cmd/workspace/workspace-iam-v2" workspace_settings_v2 "github.com/databricks/cli/cmd/workspace/workspace-settings-v2" "github.com/spf13/cobra" + + groups "github.com/databricks/cli/cmd/workspace/groups" + service_principals "github.com/databricks/cli/cmd/workspace/service-principals" + users "github.com/databricks/cli/cmd/workspace/users" ) func All() []*cobra.Command { @@ -153,13 +159,14 @@ func All() []*cobra.Command { out = append(out, external_lineage.New()) out = append(out, external_locations.New()) out = append(out, external_metadata.New()) + out = append(out, feature_engineering.New()) out = append(out, feature_store.New()) out = append(out, functions.New()) out = append(out, genie.New()) out = 
append(out, git_credentials.New()) out = append(out, global_init_scripts.New()) out = append(out, grants.New()) - out = append(out, groups.New()) + out = append(out, groups_v2.New()) out = append(out, instance_pools.New()) out = append(out, instance_profiles.New()) out = append(out, ip_access_lists.New()) @@ -206,7 +213,7 @@ func All() []*cobra.Command { out = append(out, schemas.New()) out = append(out, secrets.New()) out = append(out, service_principal_secrets_proxy.New()) - out = append(out, service_principals.New()) + out = append(out, service_principals_v2.New()) out = append(out, serving_endpoints.New()) out = append(out, settings.New()) out = append(out, shares.New()) @@ -219,7 +226,7 @@ func All() []*cobra.Command { out = append(out, temporary_table_credentials.New()) out = append(out, token_management.New()) out = append(out, tokens.New()) - out = append(out, users.New()) + out = append(out, users_v2.New()) out = append(out, vector_search_endpoints.New()) out = append(out, vector_search_indexes.New()) out = append(out, volumes.New()) @@ -229,6 +236,11 @@ func All() []*cobra.Command { out = append(out, workspace_conf.New()) out = append(out, workspace_settings_v2.New()) out = append(out, forecasting.New()) + out = append(out, workspace_iam_v2.New()) + + out = append(out, groups.New()) + out = append(out, service_principals.New()) + out = append(out, users.New()) return out } diff --git a/cmd/workspace/database/database.go b/cmd/workspace/database/database.go index efca299716..6ce0723659 100755 --- a/cmd/workspace/database/database.go +++ b/cmd/workspace/database/database.go @@ -179,12 +179,12 @@ func newCreateDatabaseInstance() *cobra.Command { cmd.Flags().StringVar(&createDatabaseInstanceReq.DatabaseInstance.Capacity, "capacity", createDatabaseInstanceReq.DatabaseInstance.Capacity, `The sku of the instance.`) // TODO: array: child_instance_refs - cmd.Flags().BoolVar(&createDatabaseInstanceReq.DatabaseInstance.EnablePgNativeLogin, "enable-pg-native-login", createDatabaseInstanceReq.DatabaseInstance.EnablePgNativeLogin, `Whether the instance has PG native password login enabled.`) + cmd.Flags().BoolVar(&createDatabaseInstanceReq.DatabaseInstance.EnablePgNativeLogin, "enable-pg-native-login", createDatabaseInstanceReq.DatabaseInstance.EnablePgNativeLogin, `Whether to enable PG native password login on the instance.`) cmd.Flags().BoolVar(&createDatabaseInstanceReq.DatabaseInstance.EnableReadableSecondaries, "enable-readable-secondaries", createDatabaseInstanceReq.DatabaseInstance.EnableReadableSecondaries, `Whether to enable secondaries to serve read-only traffic.`) cmd.Flags().IntVar(&createDatabaseInstanceReq.DatabaseInstance.NodeCount, "node-count", createDatabaseInstanceReq.DatabaseInstance.NodeCount, `The number of nodes in the instance, composed of 1 primary and 0 or more secondaries.`) // TODO: complex arg: parent_instance_ref cmd.Flags().IntVar(&createDatabaseInstanceReq.DatabaseInstance.RetentionWindowInDays, "retention-window-in-days", createDatabaseInstanceReq.DatabaseInstance.RetentionWindowInDays, `The retention window for the instance.`) - cmd.Flags().BoolVar(&createDatabaseInstanceReq.DatabaseInstance.Stopped, "stopped", createDatabaseInstanceReq.DatabaseInstance.Stopped, `Whether the instance is stopped.`) + cmd.Flags().BoolVar(&createDatabaseInstanceReq.DatabaseInstance.Stopped, "stopped", createDatabaseInstanceReq.DatabaseInstance.Stopped, `Whether to stop the instance.`) cmd.Use = "create-database-instance NAME" cmd.Short = `Create a Database Instance.` @@ -571,7 
+571,7 @@ func newDeleteDatabaseInstance() *cobra.Command {
 
 	var deleteDatabaseInstanceReq database.DeleteDatabaseInstanceRequest
 
 	cmd.Flags().BoolVar(&deleteDatabaseInstanceReq.Force, "force", deleteDatabaseInstanceReq.Force, `By default, an instance cannot be deleted if it has descendant instances created via PITR.`)
-	cmd.Flags().BoolVar(&deleteDatabaseInstanceReq.Purge, "purge", deleteDatabaseInstanceReq.Purge, `Note purge=false is in development.`)
+	cmd.Flags().BoolVar(&deleteDatabaseInstanceReq.Purge, "purge", deleteDatabaseInstanceReq.Purge, `Deprecated.`)
 
 	cmd.Use = "delete-database-instance NAME"
 	cmd.Short = `Delete a Database Instance.`
@@ -1500,12 +1500,12 @@ func newUpdateDatabaseInstance() *cobra.Command {
 	cmd.Flags().StringVar(&updateDatabaseInstanceReq.DatabaseInstance.Capacity, "capacity", updateDatabaseInstanceReq.DatabaseInstance.Capacity, `The sku of the instance.`)
 	// TODO: array: child_instance_refs
-	cmd.Flags().BoolVar(&updateDatabaseInstanceReq.DatabaseInstance.EnablePgNativeLogin, "enable-pg-native-login", updateDatabaseInstanceReq.DatabaseInstance.EnablePgNativeLogin, `Whether the instance has PG native password login enabled.`)
+	cmd.Flags().BoolVar(&updateDatabaseInstanceReq.DatabaseInstance.EnablePgNativeLogin, "enable-pg-native-login", updateDatabaseInstanceReq.DatabaseInstance.EnablePgNativeLogin, `Whether to enable PG native password login on the instance.`)
 	cmd.Flags().BoolVar(&updateDatabaseInstanceReq.DatabaseInstance.EnableReadableSecondaries, "enable-readable-secondaries", updateDatabaseInstanceReq.DatabaseInstance.EnableReadableSecondaries, `Whether to enable secondaries to serve read-only traffic.`)
 	cmd.Flags().IntVar(&updateDatabaseInstanceReq.DatabaseInstance.NodeCount, "node-count", updateDatabaseInstanceReq.DatabaseInstance.NodeCount, `The number of nodes in the instance, composed of 1 primary and 0 or more secondaries.`)
 	// TODO: complex arg: parent_instance_ref
 	cmd.Flags().IntVar(&updateDatabaseInstanceReq.DatabaseInstance.RetentionWindowInDays, "retention-window-in-days", updateDatabaseInstanceReq.DatabaseInstance.RetentionWindowInDays, `The retention window for the instance.`)
-	cmd.Flags().BoolVar(&updateDatabaseInstanceReq.DatabaseInstance.Stopped, "stopped", updateDatabaseInstanceReq.DatabaseInstance.Stopped, `Whether the instance is stopped.`)
+	cmd.Flags().BoolVar(&updateDatabaseInstanceReq.DatabaseInstance.Stopped, "stopped", updateDatabaseInstanceReq.DatabaseInstance.Stopped, `Whether to stop the instance.`)
 
 	cmd.Use = "update-database-instance NAME UPDATE_MASK"
 	cmd.Short = `Update a Database Instance.`
diff --git a/cmd/workspace/feature-engineering/feature-engineering.go b/cmd/workspace/feature-engineering/feature-engineering.go
new file mode 100755
index 0000000000..e69ce789d0
--- /dev/null
+++ b/cmd/workspace/feature-engineering/feature-engineering.go
@@ -0,0 +1,435 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package feature_engineering
+
+import (
+	"fmt"
+
+	"github.com/databricks/cli/cmd/root"
+	"github.com/databricks/cli/libs/cmdctx"
+	"github.com/databricks/cli/libs/cmdio"
+	"github.com/databricks/cli/libs/flags"
+	"github.com/databricks/databricks-sdk-go/service/ml"
+	"github.com/spf13/cobra"
+)
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
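The constructor below ships with Hidden: true while this service is in preview. Since New() applies cmdOverrides last, a curated file in this directory could surface the group without touching the generated code; a sketch, with the override file hypothetical:

	// override.go (hand-written, hypothetical)
	package feature_engineering

	import "github.com/spf13/cobra"

	func init() {
		cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) {
			// Surface the preview command group in help output.
			cmd.Hidden = false
		})
	}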
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "feature-engineering", + Short: `[description].`, + Long: `[description]`, + GroupID: "ml", + Annotations: map[string]string{ + "package": "ml", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newCreateFeature()) + cmd.AddCommand(newDeleteFeature()) + cmd.AddCommand(newGetFeature()) + cmd.AddCommand(newListFeatures()) + cmd.AddCommand(newUpdateFeature()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create-feature command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createFeatureOverrides []func( + *cobra.Command, + *ml.CreateFeatureRequest, +) + +func newCreateFeature() *cobra.Command { + cmd := &cobra.Command{} + + var createFeatureReq ml.CreateFeatureRequest + createFeatureReq.Feature = ml.Feature{} + var createFeatureJson flags.JsonFlag + + cmd.Flags().Var(&createFeatureJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createFeatureReq.Feature.Description, "description", createFeatureReq.Feature.Description, `The description of the feature.`) + + cmd.Use = "create-feature FULL_NAME SOURCE INPUTS FUNCTION TIME_WINDOW" + cmd.Short = `Create a feature.` + cmd.Long = `Create a feature. + + Create a Feature. + + Arguments: + FULL_NAME: The full three-part name (catalog, schema, name) of the feature. + SOURCE: The data source of the feature. + INPUTS: The input columns from which the feature is computed. + FUNCTION: The function by which the feature is computed. + TIME_WINDOW: The time window in which the feature is computed.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(0)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, no positional arguments are required. 
Provide 'full_name', 'source', 'inputs', 'function', 'time_window' in your JSON input") + } + return nil + } + check := root.ExactArgs(5) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createFeatureJson.Unmarshal(&createFeatureReq.Feature) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + if !cmd.Flags().Changed("json") { + createFeatureReq.Feature.FullName = args[0] + } + if !cmd.Flags().Changed("json") { + _, err = fmt.Sscan(args[1], &createFeatureReq.Feature.Source) + if err != nil { + return fmt.Errorf("invalid SOURCE: %s", args[1]) + } + } + if !cmd.Flags().Changed("json") { + _, err = fmt.Sscan(args[2], &createFeatureReq.Feature.Inputs) + if err != nil { + return fmt.Errorf("invalid INPUTS: %s", args[2]) + } + } + if !cmd.Flags().Changed("json") { + _, err = fmt.Sscan(args[3], &createFeatureReq.Feature.Function) + if err != nil { + return fmt.Errorf("invalid FUNCTION: %s", args[3]) + } + } + if !cmd.Flags().Changed("json") { + _, err = fmt.Sscan(args[4], &createFeatureReq.Feature.TimeWindow) + if err != nil { + return fmt.Errorf("invalid TIME_WINDOW: %s", args[4]) + } + } + + response, err := w.FeatureEngineering.CreateFeature(ctx, createFeatureReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createFeatureOverrides { + fn(cmd, &createFeatureReq) + } + + return cmd +} + +// start delete-feature command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteFeatureOverrides []func( + *cobra.Command, + *ml.DeleteFeatureRequest, +) + +func newDeleteFeature() *cobra.Command { + cmd := &cobra.Command{} + + var deleteFeatureReq ml.DeleteFeatureRequest + + cmd.Use = "delete-feature FULL_NAME" + cmd.Short = `Delete a feature.` + cmd.Long = `Delete a feature. + + Delete a Feature. + + Arguments: + FULL_NAME: Name of the feature to delete.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + deleteFeatureReq.FullName = args[0] + + err = w.FeatureEngineering.DeleteFeature(ctx, deleteFeatureReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteFeatureOverrides { + fn(cmd, &deleteFeatureReq) + } + + return cmd +} + +// start get-feature command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
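In create-feature above, RunE either parses five positional values into the structured fields of ml.Feature via fmt.Sscan or takes the whole payload from --json. Called straight through the Go SDK, the same operation reduces to roughly the following sketch (client construction elided; the three-part name is hypothetical):

	import (
		"context"
		"fmt"

		"github.com/databricks/databricks-sdk-go"
		"github.com/databricks/databricks-sdk-go/service/ml"
	)

	func createExampleFeature(ctx context.Context, w *databricks.WorkspaceClient) error {
		req := ml.CreateFeatureRequest{Feature: ml.Feature{
			// FullName is the catalog.schema.name triple; Source, Inputs,
			// Function and TimeWindow are the structured fields the
			// positional arguments above are fmt.Sscan-parsed into.
			FullName: "main.my_schema.my_feature", // hypothetical
		}}
		feature, err := w.FeatureEngineering.CreateFeature(ctx, req)
		if err != nil {
			return err
		}
		fmt.Println(feature.FullName)
		return nil
	}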
+var getFeatureOverrides []func( + *cobra.Command, + *ml.GetFeatureRequest, +) + +func newGetFeature() *cobra.Command { + cmd := &cobra.Command{} + + var getFeatureReq ml.GetFeatureRequest + + cmd.Use = "get-feature FULL_NAME" + cmd.Short = `Get a feature.` + cmd.Long = `Get a feature. + + Get a Feature. + + Arguments: + FULL_NAME: Name of the feature to get.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + getFeatureReq.FullName = args[0] + + response, err := w.FeatureEngineering.GetFeature(ctx, getFeatureReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getFeatureOverrides { + fn(cmd, &getFeatureReq) + } + + return cmd +} + +// start list-features command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listFeaturesOverrides []func( + *cobra.Command, + *ml.ListFeaturesRequest, +) + +func newListFeatures() *cobra.Command { + cmd := &cobra.Command{} + + var listFeaturesReq ml.ListFeaturesRequest + + cmd.Flags().IntVar(&listFeaturesReq.PageSize, "page-size", listFeaturesReq.PageSize, `The maximum number of results to return.`) + cmd.Flags().StringVar(&listFeaturesReq.PageToken, "page-token", listFeaturesReq.PageToken, `Pagination token to go to the next page based on a previous query.`) + + cmd.Use = "list-features" + cmd.Short = `List features.` + cmd.Long = `List features. + + List Features.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + response := w.FeatureEngineering.ListFeatures(ctx, listFeaturesReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listFeaturesOverrides { + fn(cmd, &listFeaturesReq) + } + + return cmd +} + +// start update-feature command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
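list-features above hands cmdio.RenderIterator a paging iterator rather than a materialized slice. Consumed directly, it follows the usual databricks-sdk-go listing shape; a sketch with a hypothetical page size, assuming ctx and w as in the RunE bodies above:

	it := w.FeatureEngineering.ListFeatures(ctx, ml.ListFeaturesRequest{PageSize: 50})
	for it.HasNext(ctx) {
		feature, err := it.Next(ctx)
		if err != nil {
			return err
		}
		fmt.Println(feature.FullName)
	}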
+var updateFeatureOverrides []func( + *cobra.Command, + *ml.UpdateFeatureRequest, +) + +func newUpdateFeature() *cobra.Command { + cmd := &cobra.Command{} + + var updateFeatureReq ml.UpdateFeatureRequest + updateFeatureReq.Feature = ml.Feature{} + var updateFeatureJson flags.JsonFlag + + cmd.Flags().Var(&updateFeatureJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&updateFeatureReq.Feature.Description, "description", updateFeatureReq.Feature.Description, `The description of the feature.`) + + cmd.Use = "update-feature FULL_NAME UPDATE_MASK SOURCE INPUTS FUNCTION TIME_WINDOW" + cmd.Short = `Update a feature's description (all other fields are immutable).` + cmd.Long = `Update a feature's description (all other fields are immutable). + + Update a Feature. + + Arguments: + FULL_NAME: The full three-part name (catalog, schema, name) of the feature. + UPDATE_MASK: The list of fields to update. + SOURCE: The data source of the feature. + INPUTS: The input columns from which the feature is computed. + FUNCTION: The function by which the feature is computed. + TIME_WINDOW: The time window in which the feature is computed.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(2)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, provide only FULL_NAME, UPDATE_MASK as positional arguments. Provide 'full_name', 'source', 'inputs', 'function', 'time_window' in your JSON input") + } + return nil + } + check := root.ExactArgs(6) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateFeatureJson.Unmarshal(&updateFeatureReq.Feature) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + updateFeatureReq.FullName = args[0] + updateFeatureReq.UpdateMask = args[1] + if !cmd.Flags().Changed("json") { + _, err = fmt.Sscan(args[2], &updateFeatureReq.Feature.Source) + if err != nil { + return fmt.Errorf("invalid SOURCE: %s", args[2]) + } + } + if !cmd.Flags().Changed("json") { + _, err = fmt.Sscan(args[3], &updateFeatureReq.Feature.Inputs) + if err != nil { + return fmt.Errorf("invalid INPUTS: %s", args[3]) + } + } + if !cmd.Flags().Changed("json") { + _, err = fmt.Sscan(args[4], &updateFeatureReq.Feature.Function) + if err != nil { + return fmt.Errorf("invalid FUNCTION: %s", args[4]) + } + } + if !cmd.Flags().Changed("json") { + _, err = fmt.Sscan(args[5], &updateFeatureReq.Feature.TimeWindow) + if err != nil { + return fmt.Errorf("invalid TIME_WINDOW: %s", args[5]) + } + } + + response, err := w.FeatureEngineering.UpdateFeature(ctx, updateFeatureReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateFeatureOverrides { + fn(cmd, &updateFeatureReq) + } + + return cmd +} + +// end service FeatureEngineering diff --git a/cmd/workspace/genie/genie.go b/cmd/workspace/genie/genie.go index 5a0a5f046c..d2a402342b 100755 --- a/cmd/workspace/genie/genie.go +++ b/cmd/workspace/genie/genie.go @@ -248,9 +248,6 @@ func newDeleteConversationMessage() *cobra.Command { CONVERSATION_ID: The ID associated with the conversation. MESSAGE_ID: The ID associated with the message to delete.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -752,9 +749,6 @@ func newListConversationMessages() *cobra.Command { SPACE_ID: The ID associated with the Genie space where the conversation is located CONVERSATION_ID: The ID of the conversation to list messages from` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -920,8 +914,6 @@ func newSendMessageFeedback() *cobra.Command { cmd.Flags().Var(&sendMessageFeedbackJson, "json", `either inline JSON string or @path/to/file.json with request body`) - cmd.Flags().StringVar(&sendMessageFeedbackReq.Comment, "comment", sendMessageFeedbackReq.Comment, `Optional text feedback that will be stored as a comment.`) - cmd.Use = "send-message-feedback SPACE_ID CONVERSATION_ID MESSAGE_ID RATING" cmd.Short = `Send message feedback.` cmd.Long = `Send message feedback. @@ -935,9 +927,6 @@ func newSendMessageFeedback() *cobra.Command { RATING: The rating (POSITIVE, NEGATIVE, or NONE). Supported values: [NEGATIVE, NONE, POSITIVE]` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { diff --git a/cmd/workspace/groups-v2/groups-v2.go b/cmd/workspace/groups-v2/groups-v2.go new file mode 100755 index 0000000000..580e5d1952 --- /dev/null +++ b/cmd/workspace/groups-v2/groups-v2.go @@ -0,0 +1,452 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package groups_v2 + +import ( + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "groups-v2", + Short: `Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.`, + Long: `Groups simplify identity management, making it easier to assign access to + Databricks workspace, data, and other securable objects. + + It is best practice to assign access to workspaces and access-control policies + in Unity Catalog to groups, instead of to users individually. 
All Databricks + workspace identities can be assigned as members of groups, and members inherit + permissions that are assigned to their group.`, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newCreate()) + cmd.AddCommand(newDelete()) + cmd.AddCommand(newGet()) + cmd.AddCommand(newList()) + cmd.AddCommand(newPatch()) + cmd.AddCommand(newUpdate()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *iam.CreateGroupRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.CreateGroupRequest + var createJson flags.JsonFlag + + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a human-readable group name.`) + // TODO: array: entitlements + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + // TODO: array: groups + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks group ID.`) + // TODO: array: members + // TODO: complex arg: meta + // TODO: array: roles + // TODO: array: schemas + + cmd.Use = "create" + cmd.Short = `Create a new group.` + cmd.Long = `Create a new group. + + Creates a group in the Databricks workspace with a unique name, using the + supplied group details.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createJson.Unmarshal(&createReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + + response, err := w.GroupsV2.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteGroupRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteGroupRequest + + cmd.Use = "delete ID" + cmd.Short = `Delete a group.` + cmd.Long = `Delete a group. + + Deletes a group from the Databricks workspace. 
+ + Arguments: + ID: Unique ID for a group in the Databricks workspace.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + deleteReq.Id = args[0] + + err = w.GroupsV2.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetGroupRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetGroupRequest + + cmd.Use = "get ID" + cmd.Short = `Get group details.` + cmd.Long = `Get group details. + + Gets the information for a specific group in the Databricks workspace. + + Arguments: + ID: Unique ID for a group in the Databricks workspace.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + getReq.Id = args[0] + + response, err := w.GroupsV2.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *iam.ListGroupsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListGroupsRequest + + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().Int64Var(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results. Supported values: [ascending, descending]`) + cmd.Flags().Int64Var(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + + cmd.Use = "list" + cmd.Short = `List group details.` + cmd.Long = `List group details. 
+ + Gets all details of the groups associated with the Databricks workspace.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + response := w.GroupsV2.List(ctx, listReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +// start patch command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PatchGroupRequest, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PatchGroupRequest + var patchJson flags.JsonFlag + + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: Operations + // TODO: array: schemas + + cmd.Use = "patch ID" + cmd.Short = `Update group details.` + cmd.Long = `Update group details. + + Partially updates the details of a group. + + Arguments: + ID: Unique ID in the Databricks workspace.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := patchJson.Unmarshal(&patchReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + patchReq.Id = args[0] + + err = w.GroupsV2.Patch(ctx, patchReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
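The Operations and schemas arrays marked TODO in patch above have no dedicated flags yet, so patch bodies normally arrive via --json. Expressed against the SDK, a SCIM PatchOp adding a group member might look like the sketch below; the IDs are hypothetical and the iam type names are assumed from the SDK's SCIM patch support:

	import (
		"context"

		"github.com/databricks/databricks-sdk-go"
		"github.com/databricks/databricks-sdk-go/service/iam"
	)

	func addMemberToGroup(ctx context.Context, w *databricks.WorkspaceClient) error {
		return w.GroupsV2.Patch(ctx, iam.PatchGroupRequest{
			Id:      "123456", // hypothetical group ID
			Schemas: []iam.PatchSchema{iam.PatchSchemaUrnIetfParamsScimApiMessages20PatchOp},
			Operations: []iam.Patch{{
				Op:    iam.PatchOpAdd,
				Path:  "members",
				Value: []map[string]string{{"value": "7890"}}, // hypothetical member ID
			}},
		})
	}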
+var updateOverrides []func( + *cobra.Command, + *iam.UpdateGroupRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.UpdateGroupRequest + var updateJson flags.JsonFlag + + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a human-readable group name.`) + // TODO: array: entitlements + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + // TODO: array: groups + // TODO: array: members + // TODO: complex arg: meta + // TODO: array: roles + // TODO: array: schemas + + cmd.Use = "update ID" + cmd.Short = `Replace a group.` + cmd.Long = `Replace a group. + + Updates the details of a group by replacing the entire group entity. + + Arguments: + ID: Databricks group ID` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateJson.Unmarshal(&updateReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + updateReq.Id = args[0] + + err = w.GroupsV2.Update(ctx, updateReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +// end service GroupsV2 diff --git a/cmd/workspace/groups.go b/cmd/workspace/groups.go index e7f2983dbe..51444fcd57 100644 --- a/cmd/workspace/groups.go +++ b/cmd/workspace/groups.go @@ -92,5 +92,9 @@ func Groups() []cobra.Group { ID: "oauth2", Title: "OAuth", }, + { + ID: "iamv2", + Title: "Identity and Access Management (v2)", + }, } } diff --git a/cmd/workspace/service-principals-v2/service-principals-v2.go b/cmd/workspace/service-principals-v2/service-principals-v2.go new file mode 100755 index 0000000000..a0377fd5a6 --- /dev/null +++ b/cmd/workspace/service-principals-v2/service-principals-v2.go @@ -0,0 +1,454 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package service_principals_v2 + +import ( + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "service-principals-v2", + Short: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.`, + Long: `Identities for use with jobs, automated tools, and systems such as scripts, + apps, and CI/CD platforms. 
Databricks recommends creating service principals + to run production jobs or modify production data. If all processes that act on + production data run with service principals, interactive users do not need any + write, delete, or modify privileges in production. This eliminates the risk of + a user overwriting production data by accident.`, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newCreate()) + cmd.AddCommand(newDelete()) + cmd.AddCommand(newGet()) + cmd.AddCommand(newList()) + cmd.AddCommand(newPatch()) + cmd.AddCommand(newUpdate()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *iam.CreateServicePrincipalRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.CreateServicePrincipalRequest + var createJson flags.JsonFlag + + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.ApplicationId, "application-id", createReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + // TODO: array: entitlements + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + // TODO: array: groups + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks service principal ID.`) + // TODO: array: roles + // TODO: array: schemas + + cmd.Use = "create" + cmd.Short = `Create a service principal.` + cmd.Long = `Create a service principal. + + Creates a new service principal in the Databricks workspace.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createJson.Unmarshal(&createReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + + response, err := w.ServicePrincipalsV2.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
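Every flag registered on create above writes directly into iam.CreateServicePrincipalRequest, so the equivalent direct SDK call is compact; a sketch with a hypothetical display name, assuming ctx and w as in the RunE bodies above:

	sp, err := w.ServicePrincipalsV2.Create(ctx, iam.CreateServicePrincipalRequest{
		DisplayName: "ci-deploy-bot", // hypothetical; Databricks assigns the application ID if omitted
	})
	if err != nil {
		return err
	}
	fmt.Println(sp.ApplicationId)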
+var deleteOverrides []func(
+	*cobra.Command,
+	*iam.DeleteServicePrincipalRequest,
+)
+
+func newDelete() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var deleteReq iam.DeleteServicePrincipalRequest
+
+	cmd.Use = "delete ID"
+	cmd.Short = `Delete a service principal.`
+	cmd.Long = `Delete a service principal.
+
+  Delete a single service principal in the Databricks workspace.
+
+  Arguments:
+    ID: Unique ID for a service principal in the Databricks workspace.`
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := cmdctx.WorkspaceClient(ctx)
+
+		deleteReq.Id = args[0]
+
+		err = w.ServicePrincipalsV2.Delete(ctx, deleteReq)
+		if err != nil {
+			return err
+		}
+		return nil
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range deleteOverrides {
+		fn(cmd, &deleteReq)
+	}
+
+	return cmd
+}
+
+// start get command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var getOverrides []func(
+	*cobra.Command,
+	*iam.GetServicePrincipalRequest,
+)
+
+func newGet() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var getReq iam.GetServicePrincipalRequest
+
+	cmd.Use = "get ID"
+	cmd.Short = `Get service principal details.`
+	cmd.Long = `Get service principal details.
+
+  Gets the details for a single service principal defined in the Databricks
+  workspace.
+
+  Arguments:
+    ID: Unique ID for a service principal in the Databricks workspace.`
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := cmdctx.WorkspaceClient(ctx)
+
+		getReq.Id = args[0]
+
+		response, err := w.ServicePrincipalsV2.Get(ctx, getReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range getOverrides {
+		fn(cmd, &getReq)
+	}
+
+	return cmd
+}
+
+// start list command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var listOverrides []func( + *cobra.Command, + *iam.ListServicePrincipalsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListServicePrincipalsRequest + + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().Int64Var(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results. Supported values: [ascending, descending]`) + cmd.Flags().Int64Var(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + + cmd.Use = "list" + cmd.Short = `List service principals.` + cmd.Long = `List service principals. + + Gets the set of service principals associated with a Databricks workspace.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + response := w.ServicePrincipalsV2.List(ctx, listReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +// start patch command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PatchServicePrincipalRequest, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PatchServicePrincipalRequest + var patchJson flags.JsonFlag + + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: Operations + // TODO: array: schemas + + cmd.Use = "patch ID" + cmd.Short = `Update service principal details.` + cmd.Long = `Update service principal details. + + Partially updates the details of a single service principal in the Databricks + workspace. 
+ + Arguments: + ID: Unique ID in the Databricks workspace.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := patchJson.Unmarshal(&patchReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + patchReq.Id = args[0] + + err = w.ServicePrincipalsV2.Patch(ctx, patchReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *iam.UpdateServicePrincipalRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.UpdateServicePrincipalRequest + var updateJson flags.JsonFlag + + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&updateReq.ApplicationId, "application-id", updateReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) + // TODO: array: entitlements + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + // TODO: array: groups + // TODO: array: roles + // TODO: array: schemas + + cmd.Use = "update ID" + cmd.Short = `Replace service principal.` + cmd.Long = `Replace service principal. + + Updates the details of a single service principal. + + This action replaces the existing service principal with the same name. + + Arguments: + ID: Databricks service principal ID.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateJson.Unmarshal(&updateReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + updateReq.Id = args[0] + + err = w.ServicePrincipalsV2.Update(ctx, updateReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +// end service ServicePrincipalsV2 diff --git a/cmd/workspace/shares/overrides.go b/cmd/workspace/shares/overrides.go new file mode 100644 index 0000000000..a959f45cdb --- /dev/null +++ b/cmd/workspace/shares/overrides.go @@ -0,0 +1,59 @@ +package shares + +import ( + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/sharing" + "github.com/spf13/cobra" +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sharing.ListSharesRequest + + cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of shares to return.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque pagination token to go to next page based on previous query.`) + + cmd.Use = "list" + cmd.Short = `List shares (Deprecated).` + cmd.Long = `List shares (Deprecated). + + Gets an array of data object shares from the metastore. The caller must be a + metastore admin or the owner of the share. There is no guarantee of a specific + ordering of the elements in the array.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + //nolint:staticcheck // this API is deprecated but we still need to expose this in the CLI. + response := w.Shares.List(ctx, listReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + return cmd +} + +func cmdOverride(cmd *cobra.Command) { + // List command override is added here because the command is deprecated + // and removed from the API definition. Use `list-shares` instead. + cmd.AddCommand(newList()) +} + +func init() { + cmdOverrides = append(cmdOverrides, cmdOverride) +} diff --git a/cmd/workspace/shares/shares.go b/cmd/workspace/shares/shares.go index 4a5aa282b1..803b1f05cc 100755 --- a/cmd/workspace/shares/shares.go +++ b/cmd/workspace/shares/shares.go @@ -37,7 +37,7 @@ func New() *cobra.Command { cmd.AddCommand(newCreate()) cmd.AddCommand(newDelete()) cmd.AddCommand(newGet()) - cmd.AddCommand(newList()) + cmd.AddCommand(newListShares()) cmd.AddCommand(newSharePermissions()) cmd.AddCommand(newUpdate()) cmd.AddCommand(newUpdatePermissions()) @@ -251,24 +251,24 @@ func newGet() *cobra.Command { return cmd } -// start list command +// start list-shares command // Slice with functions to override default command behavior. // Functions can be added from the `init()` function in manually curated files in this directory. 
-var listOverrides []func( +var listSharesOverrides []func( *cobra.Command, - *sharing.ListSharesRequest, + *sharing.SharesListRequest, ) -func newList() *cobra.Command { +func newListShares() *cobra.Command { cmd := &cobra.Command{} - var listReq sharing.ListSharesRequest + var listSharesReq sharing.SharesListRequest - cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of shares to return.`) - cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque pagination token to go to next page based on previous query.`) + cmd.Flags().IntVar(&listSharesReq.MaxResults, "max-results", listSharesReq.MaxResults, `Maximum number of shares to return.`) + cmd.Flags().StringVar(&listSharesReq.PageToken, "page-token", listSharesReq.PageToken, `Opaque pagination token to go to next page based on previous query.`) - cmd.Use = "list" + cmd.Use = "list-shares" cmd.Short = `List shares.` cmd.Long = `List shares. @@ -288,7 +288,7 @@ func newList() *cobra.Command { ctx := cmd.Context() w := cmdctx.WorkspaceClient(ctx) - response := w.Shares.List(ctx, listReq) + response := w.Shares.ListShares(ctx, listSharesReq) return cmdio.RenderIterator(ctx, response) } @@ -297,8 +297,8 @@ func newList() *cobra.Command { cmd.ValidArgsFunction = cobra.NoFileCompletions // Apply optional overrides to this command. - for _, fn := range listOverrides { - fn(cmd, &listReq) + for _, fn := range listSharesOverrides { + fn(cmd, &listSharesReq) } return cmd diff --git a/cmd/workspace/tag-policies/tag-policies.go b/cmd/workspace/tag-policies/tag-policies.go index a54125fa92..ae78bf28a8 100755 --- a/cmd/workspace/tag-policies/tag-policies.go +++ b/cmd/workspace/tag-policies/tag-policies.go @@ -19,9 +19,13 @@ var cmdOverrides []func(*cobra.Command) func New() *cobra.Command { cmd := &cobra.Command{ - Use: "tag-policies", - Short: `The Tag Policy API allows you to manage tag policies in Databricks.`, - Long: `The Tag Policy API allows you to manage tag policies in Databricks.`, + Use: "tag-policies", + Short: `The Tag Policy API allows you to manage policies for governed tags in Databricks.`, + Long: `The Tag Policy API allows you to manage policies for governed tags in + Databricks. Permissions for tag policies can be managed using the [Account + Access Control Proxy API]. + + [Account Access Control Proxy API]: https://docs.databricks.com/api/workspace/accountaccesscontrolproxy`, GroupID: "tags", Annotations: map[string]string{ "package": "tags", @@ -69,7 +73,7 @@ func newCreateTagPolicy() *cobra.Command { cmd.Short = `Create a new tag policy.` cmd.Long = `Create a new tag policy. - Creates a new tag policy.` + Creates a new tag policy, making the associated tag key governed.` cmd.Annotations = make(map[string]string) @@ -143,7 +147,8 @@ func newDeleteTagPolicy() *cobra.Command { cmd.Short = `Delete a tag policy.` cmd.Long = `Delete a tag policy. - Deletes a tag policy by its key.` + Deletes a tag policy by its associated governed tag's key, leaving that tag + key ungoverned.` cmd.Annotations = make(map[string]string) @@ -196,7 +201,7 @@ func newGetTagPolicy() *cobra.Command { cmd.Short = `Get a tag policy.` cmd.Long = `Get a tag policy. - Gets a single tag policy by its key.` + Gets a single tag policy by its associated governed tag's key.` cmd.Annotations = make(map[string]string) @@ -252,7 +257,7 @@ func newListTagPolicies() *cobra.Command { cmd.Short = `List tag policies.` cmd.Long = `List tag policies. 
-  Lists all tag policies in the account.`
+  Lists the tag policies for all governed tags in the account.`
 
 	cmd.Annotations = make(map[string]string)
 
@@ -307,7 +312,7 @@ func newUpdateTagPolicy() *cobra.Command {
 	cmd.Short = `Update an existing tag policy.`
 	cmd.Long = `Update an existing tag policy.
 
-  Updates an existing tag policy.
+  Updates an existing tag policy for a single governed tag.
 
   Arguments:
     TAG_KEY:
diff --git a/cmd/workspace/users-v2/users-v2.go b/cmd/workspace/users-v2/users-v2.go
new file mode 100755
index 0000000000..c7484f091c
--- /dev/null
+++ b/cmd/workspace/users-v2/users-v2.go
@@ -0,0 +1,708 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package users_v2
+
+import (
+	"github.com/databricks/cli/cmd/root"
+	"github.com/databricks/cli/libs/cmdctx"
+	"github.com/databricks/cli/libs/cmdio"
+	"github.com/databricks/cli/libs/flags"
+	"github.com/databricks/databricks-sdk-go/service/iam"
+	"github.com/spf13/cobra"
+)
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var cmdOverrides []func(*cobra.Command)
+
+func New() *cobra.Command {
+	cmd := &cobra.Command{
+		Use:   "users-v2",
+		Short: `User identities recognized by Databricks and represented by email addresses.`,
+		Long: `User identities recognized by Databricks and represented by email addresses.
+
+  Databricks recommends using SCIM provisioning to sync users and groups
+  automatically from your identity provider to your Databricks workspace. SCIM
+  streamlines onboarding a new employee or team by using your identity provider
+  to create users and groups in your Databricks workspace and give them the
+  proper level of access. When a user leaves your organization or no longer
+  needs access to the Databricks workspace, admins can terminate the user in
+  your identity provider, and that user’s account will also be removed from the
+  Databricks workspace. This ensures a consistent offboarding process and
+  prevents unauthorized users from accessing sensitive data.`,
+		GroupID: "iam",
+		Annotations: map[string]string{
+			"package": "iam",
+		},
+		RunE: root.ReportUnknownSubcommand,
+	}
+
+	// Add methods
+	cmd.AddCommand(newCreate())
+	cmd.AddCommand(newDelete())
+	cmd.AddCommand(newGet())
+	cmd.AddCommand(newGetPermissionLevels())
+	cmd.AddCommand(newGetPermissions())
+	cmd.AddCommand(newList())
+	cmd.AddCommand(newPatch())
+	cmd.AddCommand(newSetPermissions())
+	cmd.AddCommand(newUpdate())
+	cmd.AddCommand(newUpdatePermissions())
+
+	// Apply optional overrides to this command.
+	for _, fn := range cmdOverrides {
+		fn(cmd)
+	}
+
+	return cmd
+}
+
+// start create command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
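+// Example (sketch; the address is illustrative, and per the --user-name help
+// text below, userName must be the user's email address). A minimal create
+// call is either
+//
+//	databricks users-v2 create --user-name jane.doe@example.com
+//
+// or, equivalently, with an inline JSON request body:
+//
+//	databricks users-v2 create --json '{"userName": "jane.doe@example.com"}'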
+var createOverrides []func( + *cobra.Command, + *iam.CreateUserRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.CreateUserRequest + var createJson flags.JsonFlag + + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + // TODO: array: emails + // TODO: array: entitlements + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, `External ID is not currently supported.`) + // TODO: array: groups + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks user ID.`) + // TODO: complex arg: name + // TODO: array: roles + // TODO: array: schemas + cmd.Flags().StringVar(&createReq.UserName, "user-name", createReq.UserName, `Email address of the Databricks user.`) + + cmd.Use = "create" + cmd.Short = `Create a new user.` + cmd.Long = `Create a new user. + + Creates a new user in the Databricks workspace. This new user will also be + added to the Databricks account.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createJson.Unmarshal(&createReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + + response, err := w.UsersV2.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteUserRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteUserRequest + + cmd.Use = "delete ID" + cmd.Short = `Delete a user.` + cmd.Long = `Delete a user. + + Deletes a user. Deleting a user from a Databricks workspace also removes + objects associated with the user. + + Arguments: + ID: Unique ID for a user in the Databricks workspace.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + deleteReq.Id = args[0] + + err = w.UsersV2.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range deleteOverrides {
+		fn(cmd, &deleteReq)
+	}
+
+	return cmd
+}
+
+// start get command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var getOverrides []func(
+	*cobra.Command,
+	*iam.GetUserRequest,
+)
+
+func newGet() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var getReq iam.GetUserRequest
+
+	cmd.Flags().StringVar(&getReq.Attributes, "attributes", getReq.Attributes, `Comma-separated list of attributes to return in response.`)
+	cmd.Flags().IntVar(&getReq.Count, "count", getReq.Count, `Desired number of results per page.`)
+	cmd.Flags().StringVar(&getReq.ExcludedAttributes, "excluded-attributes", getReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`)
+	cmd.Flags().StringVar(&getReq.Filter, "filter", getReq.Filter, `Query by which the results have to be filtered.`)
+	cmd.Flags().StringVar(&getReq.SortBy, "sort-by", getReq.SortBy, `Attribute to sort the results.`)
+	cmd.Flags().Var(&getReq.SortOrder, "sort-order", `The order to sort the results. Supported values: [ascending, descending]`)
+	cmd.Flags().IntVar(&getReq.StartIndex, "start-index", getReq.StartIndex, `Specifies the index of the first result.`)
+
+	cmd.Use = "get ID"
+	cmd.Short = `Get user details.`
+	cmd.Long = `Get user details.
+
+  Gets information for a specific user in the Databricks workspace.
+
+  Arguments:
+    ID: Unique ID for a user in the Databricks workspace.`
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := cmdctx.WorkspaceClient(ctx)
+
+		getReq.Id = args[0]
+
+		response, err := w.UsersV2.Get(ctx, getReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range getOverrides {
+		fn(cmd, &getReq)
+	}
+
+	return cmd
+}
+
+// start get-permission-levels command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var getPermissionLevelsOverrides []func(
+	*cobra.Command,
+	*iam.GetPasswordPermissionLevelsRequest,
+)
+
+func newGetPermissionLevels() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var getPermissionLevelsReq iam.GetPasswordPermissionLevelsRequest
+
+	cmd.Use = "get-permission-levels"
+	cmd.Short = `Get password permission levels.`
+	cmd.Long = `Get password permission levels.
+
+  Gets the permission levels that a user can have on an object.`
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := cmdctx.WorkspaceClient(ctx)
+
+		response, err := w.UsersV2.GetPermissionLevels(ctx, getPermissionLevelsReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+ // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *iam.GetPasswordPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq iam.GetPasswordPermissionsRequest + + cmd.Use = "get-permissions" + cmd.Short = `Get password permissions.` + cmd.Long = `Get password permissions. + + Gets the permissions of all passwords. Passwords can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + response, err := w.UsersV2.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *iam.ListUsersRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListUsersRequest + + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().Int64Var(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results. Supported values: [ascending, descending]`) + cmd.Flags().Int64Var(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + + cmd.Use = "list" + cmd.Short = `List users.` + cmd.Long = `List users. + + Gets details for all the users associated with a Databricks workspace.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + response := w.UsersV2.List(ctx, listReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
+ cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +// start patch command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PatchUserRequest, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PatchUserRequest + var patchJson flags.JsonFlag + + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: Operations + // TODO: array: schemas + + cmd.Use = "patch ID" + cmd.Short = `Update user details.` + cmd.Long = `Update user details. + + Partially updates a user resource by applying the supplied operations on + specific user attributes. + + Arguments: + ID: Unique ID in the Databricks workspace.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := patchJson.Unmarshal(&patchReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + patchReq.Id = args[0] + + err = w.UsersV2.Patch(ctx, patchReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *iam.PasswordPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq iam.PasswordPermissionsRequest + var setPermissionsJson flags.JsonFlag + + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions" + cmd.Short = `Set password permissions.` + cmd.Long = `Set password permissions. + + Sets permissions on an object, replacing existing permissions if they exist. + Deletes all direct permissions if none are specified. 
Objects can inherit
+  permissions from their root object.`
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(0)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := cmdctx.WorkspaceClient(ctx)
+
+		if cmd.Flags().Changed("json") {
+			diags := setPermissionsJson.Unmarshal(&setPermissionsReq)
+			if diags.HasError() {
+				return diags.Error()
+			}
+			if len(diags) > 0 {
+				err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags)
+				if err != nil {
+					return err
+				}
+			}
+		}
+
+		response, err := w.UsersV2.SetPermissions(ctx, setPermissionsReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range setPermissionsOverrides {
+		fn(cmd, &setPermissionsReq)
+	}
+
+	return cmd
+}
+
+// start update command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var updateOverrides []func(
+	*cobra.Command,
+	*iam.UpdateUserRequest,
+)
+
+func newUpdate() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var updateReq iam.UpdateUserRequest
+	var updateJson flags.JsonFlag
+
+	cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
+
+	cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`)
+	cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`)
+	// TODO: array: emails
+	// TODO: array: entitlements
+	cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, `External ID is not currently supported.`)
+	// TODO: array: groups
+	// TODO: complex arg: name
+	// TODO: array: roles
+	// TODO: array: schemas
+	cmd.Flags().StringVar(&updateReq.UserName, "user-name", updateReq.UserName, `Email address of the Databricks user.`)
+
+	cmd.Use = "update ID"
+	cmd.Short = `Replace a user.`
+	cmd.Long = `Replace a user.
+
+  Replaces a user's information with the data supplied in the request.
+
+  Arguments:
+    ID: Databricks user ID.`
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := cmdctx.WorkspaceClient(ctx)
+
+		if cmd.Flags().Changed("json") {
+			diags := updateJson.Unmarshal(&updateReq)
+			if diags.HasError() {
+				return diags.Error()
+			}
+			if len(diags) > 0 {
+				err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags)
+				if err != nil {
+					return err
+				}
+			}
+		}
+		updateReq.Id = args[0]
+
+		err = w.UsersV2.Update(ctx, updateReq)
+		if err != nil {
+			return err
+		}
+		return nil
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
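+	//
+	// Example (sketch; the ID and values are illustrative). As the description
+	// above notes, `update` replaces the whole user record, so prefer `patch`
+	// for partial changes:
+	//
+	//	databricks users-v2 update 7284738 --user-name jane.doe@example.com --active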
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *iam.PasswordPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq iam.PasswordPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions" + cmd.Short = `Update password permissions.` + cmd.Long = `Update password permissions. + + Updates the permissions on all passwords. Passwords can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + + response, err := w.UsersV2.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +// end service UsersV2 diff --git a/cmd/workspace/warehouses/warehouses.go b/cmd/workspace/warehouses/warehouses.go index 8928a265a8..439d988074 100755 --- a/cmd/workspace/warehouses/warehouses.go +++ b/cmd/workspace/warehouses/warehouses.go @@ -619,7 +619,7 @@ func newList() *cobra.Command { cmd.Short = `List warehouses.` cmd.Long = `List warehouses. - Lists all SQL warehouses that a user has manager permissions on.` + Lists all SQL warehouses that a user has access to.` cmd.Annotations = make(map[string]string) diff --git a/cmd/workspace/workspace-iam-v2/workspace-iam-v2.go b/cmd/workspace/workspace-iam-v2/workspace-iam-v2.go new file mode 100755 index 0000000000..bfa75ba936 --- /dev/null +++ b/cmd/workspace/workspace-iam-v2/workspace-iam-v2.go @@ -0,0 +1,361 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package workspace_iam_v2 + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/iamv2" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command)
+
+func New() *cobra.Command {
+	cmd := &cobra.Command{
+		Use:   "workspace-iam-v2",
+		Short: `These APIs are used to manage identities and the workspace access of these identities in Databricks.`,
+		Long: `These APIs are used to manage identities and the workspace access of these
+  identities in Databricks.`,
+		GroupID: "iamv2",
+		Annotations: map[string]string{
+			"package": "iamv2",
+		},
+		RunE: root.ReportUnknownSubcommand,
+	}
+
+	// Add methods
+	cmd.AddCommand(newGetWorkspaceAccessDetailLocal())
+	cmd.AddCommand(newResolveGroupProxy())
+	cmd.AddCommand(newResolveServicePrincipalProxy())
+	cmd.AddCommand(newResolveUserProxy())
+
+	// Apply optional overrides to this command.
+	for _, fn := range cmdOverrides {
+		fn(cmd)
+	}
+
+	return cmd
+}
+
+// start get-workspace-access-detail-local command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var getWorkspaceAccessDetailLocalOverrides []func(
+	*cobra.Command,
+	*iamv2.GetWorkspaceAccessDetailLocalRequest,
+)
+
+func newGetWorkspaceAccessDetailLocal() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var getWorkspaceAccessDetailLocalReq iamv2.GetWorkspaceAccessDetailLocalRequest
+
+	cmd.Flags().Var(&getWorkspaceAccessDetailLocalReq.View, "view", `Controls what fields are returned. Supported values: [BASIC, FULL]`)
+
+	cmd.Use = "get-workspace-access-detail-local PRINCIPAL_ID"
+	cmd.Short = `Get workspace access details for a principal.`
+	cmd.Long = `Get workspace access details for a principal.
+
+  Returns the access details for a principal in the current workspace. Allows
+  for checking access details for any provisioned principal (user, service
+  principal, or group) in the current workspace. A provisioned principal here
+  refers to one that has been synced into Databricks from the customer's IdP or
+  added explicitly to Databricks via SCIM/UI. Allows for passing in a "view"
+  parameter to control what fields are returned (BASIC by default or FULL).
+
+  Arguments:
+    PRINCIPAL_ID: Required. The internal ID of the principal (user/sp/group) for which the
+      access details are being requested.`
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := cmdctx.WorkspaceClient(ctx)
+
+		_, err = fmt.Sscan(args[0], &getWorkspaceAccessDetailLocalReq.PrincipalId)
+		if err != nil {
+			return fmt.Errorf("invalid PRINCIPAL_ID: %s", args[0])
+		}
+
+		response, err := w.WorkspaceIamV2.GetWorkspaceAccessDetailLocal(ctx, getWorkspaceAccessDetailLocalReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range getWorkspaceAccessDetailLocalOverrides {
+		fn(cmd, &getWorkspaceAccessDetailLocalReq)
+	}
+
+	return cmd
+}
+
+// start resolve-group-proxy command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
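+// Example (sketch; the argument is a placeholder for a group's object ID in
+// your IdP). Per the command description below, resolving creates the group in
+// the account on first use and fails if the account is not onboarded onto
+// Automatic Identity Management:
+//
+//	databricks workspace-iam-v2 resolve-group-proxy <external-id>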
+var resolveGroupProxyOverrides []func( + *cobra.Command, + *iamv2.ResolveGroupProxyRequest, +) + +func newResolveGroupProxy() *cobra.Command { + cmd := &cobra.Command{} + + var resolveGroupProxyReq iamv2.ResolveGroupProxyRequest + var resolveGroupProxyJson flags.JsonFlag + + cmd.Flags().Var(&resolveGroupProxyJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "resolve-group-proxy EXTERNAL_ID" + cmd.Short = `Resolve an external group in the Databricks account.` + cmd.Long = `Resolve an external group in the Databricks account. + + Resolves a group with the given external ID from the customer's IdP. If the + group does not exist, it will be created in the account. If the customer is + not onboarded onto Automatic Identity Management (AIM), this will return an + error. + + Arguments: + EXTERNAL_ID: Required. The external ID of the group in the customer's IdP.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(0)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'external_id' in your JSON input") + } + return nil + } + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := resolveGroupProxyJson.Unmarshal(&resolveGroupProxyReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + if !cmd.Flags().Changed("json") { + resolveGroupProxyReq.ExternalId = args[0] + } + + response, err := w.WorkspaceIamV2.ResolveGroupProxy(ctx, resolveGroupProxyReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range resolveGroupProxyOverrides { + fn(cmd, &resolveGroupProxyReq) + } + + return cmd +} + +// start resolve-service-principal-proxy command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var resolveServicePrincipalProxyOverrides []func( + *cobra.Command, + *iamv2.ResolveServicePrincipalProxyRequest, +) + +func newResolveServicePrincipalProxy() *cobra.Command { + cmd := &cobra.Command{} + + var resolveServicePrincipalProxyReq iamv2.ResolveServicePrincipalProxyRequest + var resolveServicePrincipalProxyJson flags.JsonFlag + + cmd.Flags().Var(&resolveServicePrincipalProxyJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "resolve-service-principal-proxy EXTERNAL_ID" + cmd.Short = `Resolve an external service principal in the Databricks account.` + cmd.Long = `Resolve an external service principal in the Databricks account. + + Resolves an SP with the given external ID from the customer's IdP. If the SP + does not exist, it will be created. If the customer is not onboarded onto + Automatic Identity Management (AIM), this will return an error. + + Arguments: + EXTERNAL_ID: Required. 
The external ID of the service principal in the customer's IdP.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(0)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'external_id' in your JSON input") + } + return nil + } + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := resolveServicePrincipalProxyJson.Unmarshal(&resolveServicePrincipalProxyReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + if !cmd.Flags().Changed("json") { + resolveServicePrincipalProxyReq.ExternalId = args[0] + } + + response, err := w.WorkspaceIamV2.ResolveServicePrincipalProxy(ctx, resolveServicePrincipalProxyReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range resolveServicePrincipalProxyOverrides { + fn(cmd, &resolveServicePrincipalProxyReq) + } + + return cmd +} + +// start resolve-user-proxy command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var resolveUserProxyOverrides []func( + *cobra.Command, + *iamv2.ResolveUserProxyRequest, +) + +func newResolveUserProxy() *cobra.Command { + cmd := &cobra.Command{} + + var resolveUserProxyReq iamv2.ResolveUserProxyRequest + var resolveUserProxyJson flags.JsonFlag + + cmd.Flags().Var(&resolveUserProxyJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "resolve-user-proxy EXTERNAL_ID" + cmd.Short = `Resolve an external user in the Databricks account.` + cmd.Long = `Resolve an external user in the Databricks account. + + Resolves a user with the given external ID from the customer's IdP. If the + user does not exist, it will be created. If the customer is not onboarded onto + Automatic Identity Management (AIM), this will return an error. + + Arguments: + EXTERNAL_ID: Required. The external ID of the user in the customer's IdP.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(0)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, no positional arguments are required. 
Provide 'external_id' in your JSON input")
+			}
+			return nil
+		}
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := cmdctx.WorkspaceClient(ctx)
+
+		if cmd.Flags().Changed("json") {
+			diags := resolveUserProxyJson.Unmarshal(&resolveUserProxyReq)
+			if diags.HasError() {
+				return diags.Error()
+			}
+			if len(diags) > 0 {
+				err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags)
+				if err != nil {
+					return err
+				}
+			}
+		}
+		if !cmd.Flags().Changed("json") {
+			resolveUserProxyReq.ExternalId = args[0]
+		}
+
+		response, err := w.WorkspaceIamV2.ResolveUserProxy(ctx, resolveUserProxyReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range resolveUserProxyOverrides {
+		fn(cmd, &resolveUserProxyReq)
+	}
+
+	return cmd
+}
+
+// end service workspace_iamV2
diff --git a/cmd/workspace/workspace-settings-v2/workspace-settings-v2.go b/cmd/workspace/workspace-settings-v2/workspace-settings-v2.go
index 9d50deb324..42890935cd 100755
--- a/cmd/workspace/workspace-settings-v2/workspace-settings-v2.go
+++ b/cmd/workspace/workspace-settings-v2/workspace-settings-v2.go
@@ -24,10 +24,7 @@ func New() *cobra.Command {
 		Annotations: map[string]string{
 			"package": "settingsv2",
 		},
-
-		// This service is being previewed; hide from help output.
-		Hidden: true,
-		RunE:   root.ReportUnknownSubcommand,
+		RunE: root.ReportUnknownSubcommand,
 	}
 
 	// Add methods
@@ -61,7 +58,9 @@ func newGetPublicWorkspaceSetting() *cobra.Command {
 	cmd.Short = `Get a workspace setting.`
 	cmd.Long = `Get a workspace setting.
 
-  Get a setting value at workspace level`
+  Get a setting value at the workspace level. See
+  :method:settingsv2/listworkspacesettingsmetadata for the list of settings
+  available via public APIs.`
 
 	cmd.Annotations = make(map[string]string)
 
@@ -117,12 +116,9 @@ func newListWorkspaceSettingsMetadata() *cobra.Command {
 	cmd.Short = `List valid setting keys and their metadata.`
 	cmd.Long = `List valid setting keys and their metadata.
 
-  List valid setting keys and metadata. These settings are available to
-  referenced via [GET
-  /api/2.1/settings/{name}](#~1api~1workspace~1settingsv2~1getpublicworkspacesetting)
-  and [PATCH
-  /api/2.1/settings/{name}](#~1api~1workspace~1settingsv2~patchpublicworkspacesetting)
-  APIs`
+  List valid setting keys and metadata.
These settings are available to be
+  referenced via GET :method:settingsv2/getpublicworkspacesetting and PATCH
+  :method:settingsv2/patchpublicworkspacesetting APIs.`
 
 	cmd.Annotations = make(map[string]string)
 
@@ -174,12 +170,10 @@ func newPatchPublicWorkspaceSetting() *cobra.Command {
 	// TODO: complex arg: aibi_dashboard_embedding_approved_domains
 	// TODO: complex arg: automatic_cluster_update_workspace
 	// TODO: complex arg: boolean_val
-	// TODO: complex arg: default_data_security_mode
 	// TODO: complex arg: effective_aibi_dashboard_embedding_access_policy
 	// TODO: complex arg: effective_aibi_dashboard_embedding_approved_domains
 	// TODO: complex arg: effective_automatic_cluster_update_workspace
 	// TODO: complex arg: effective_boolean_val
-	// TODO: complex arg: effective_default_data_security_mode
 	// TODO: complex arg: effective_integer_val
 	// TODO: complex arg: effective_personal_compute
 	// TODO: complex arg: effective_restrict_workspace_admins
@@ -194,7 +188,9 @@ func newPatchPublicWorkspaceSetting() *cobra.Command {
 	cmd.Short = `Update a workspace setting.`
 	cmd.Long = `Update a workspace setting.
 
-  Patch a setting value at workspace level`
+  Patch a setting value at the workspace level. See
+  :method:settingsv2/listworkspacesettingsmetadata for the list of settings
+  available via public APIs at the workspace level.`
 
 	cmd.Annotations = make(map[string]string)
diff --git a/experimental/python/databricks/bundles/jobs/_models/environment.py b/experimental/python/databricks/bundles/jobs/_models/environment.py
index f7606430ec..e2b88cb481 100644
--- a/experimental/python/databricks/bundles/jobs/_models/environment.py
+++ b/experimental/python/databricks/bundles/jobs/_models/environment.py
@@ -33,7 +33,7 @@ class Environment:
     The version is a string, consisting of an integer.
     """
 
-    jar_dependencies: VariableOrList[str] = field(default_factory=list)
+    java_dependencies: VariableOrList[str] = field(default_factory=list)
     """
     :meta private: [EXPERIMENTAL]
 
@@ -68,7 +68,7 @@ class EnvironmentDict(TypedDict, total=False):
     The version is a string, consisting of an integer.
     """
 
-    jar_dependencies: VariableOrList[str]
+    java_dependencies: VariableOrList[str]
     """
     :meta private: [EXPERIMENTAL]
diff --git a/experimental/python/databricks/bundles/jobs/_models/spark_submit_task.py b/experimental/python/databricks/bundles/jobs/_models/spark_submit_task.py
index edc57577b5..c809dbe721 100644
--- a/experimental/python/databricks/bundles/jobs/_models/spark_submit_task.py
+++ b/experimental/python/databricks/bundles/jobs/_models/spark_submit_task.py
@@ -11,7 +11,9 @@
 
 @dataclass(kw_only=True)
 class SparkSubmitTask:
-    """"""
+    """
+    [DEPRECATED]
+    """
 
     parameters: VariableOrList[str] = field(default_factory=list)
     """
diff --git a/experimental/python/databricks/bundles/jobs/_models/task.py b/experimental/python/databricks/bundles/jobs/_models/task.py
index 2498c0db08..0393213b65 100644
--- a/experimental/python/databricks/bundles/jobs/_models/task.py
+++ b/experimental/python/databricks/bundles/jobs/_models/task.py
@@ -104,7 +104,7 @@ class Task:
 
     clean_rooms_notebook_task: VariableOrOptional[CleanRoomsNotebookTask] = None
     """
-    The task runs a [clean rooms](https://docs.databricks.com/en/clean-rooms/index.html) notebook
+    The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook
     when the `clean_rooms_notebook_task` field is present.
""" @@ -145,6 +145,13 @@ class Task: An option to disable auto optimization in serverless """ + disabled: VariableOrOptional[bool] = None + """ + :meta private: [EXPERIMENTAL] + + An optional flag to disable the task. If set to true, the task will not run even if it is part of a job. + """ + email_notifications: VariableOrOptional[TaskEmailNotifications] = None """ An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails. @@ -261,15 +268,7 @@ class Task: spark_submit_task: VariableOrOptional[SparkSubmitTask] = None """ - (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute. - - In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations. - - `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters. - - By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage. - - The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths. + [DEPRECATED] (Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit). """ sql_task: VariableOrOptional[SqlTask] = None @@ -307,7 +306,7 @@ class TaskDict(TypedDict, total=False): clean_rooms_notebook_task: VariableOrOptional[CleanRoomsNotebookTaskParam] """ - The task runs a [clean rooms](https://docs.databricks.com/en/clean-rooms/index.html) notebook + The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook when the `clean_rooms_notebook_task` field is present. """ @@ -348,6 +347,13 @@ class TaskDict(TypedDict, total=False): An option to disable auto optimization in serverless """ + disabled: VariableOrOptional[bool] + """ + :meta private: [EXPERIMENTAL] + + An optional flag to disable the task. If set to true, the task will not run even if it is part of a job. + """ + email_notifications: VariableOrOptional[TaskEmailNotificationsParam] """ An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails. @@ -464,15 +470,7 @@ class TaskDict(TypedDict, total=False): spark_submit_task: VariableOrOptional[SparkSubmitTaskParam] """ - (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute. - - In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations. - - `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters. - - By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). 
You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage. - - The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths. + [DEPRECATED] (Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit). """ sql_task: VariableOrOptional[SqlTaskParam] diff --git a/experimental/python/databricks/bundles/pipelines/__init__.py b/experimental/python/databricks/bundles/pipelines/__init__.py index 8801727328..a4bcfea524 100644 --- a/experimental/python/databricks/bundles/pipelines/__init__.py +++ b/experimental/python/databricks/bundles/pipelines/__init__.py @@ -51,6 +51,12 @@ "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig", "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigDict", "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam", + "IngestionPipelineDefinitionWorkdayReportParameters", + "IngestionPipelineDefinitionWorkdayReportParametersDict", + "IngestionPipelineDefinitionWorkdayReportParametersParam", + "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue", + "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict", + "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam", "IngestionSourceType", "IngestionSourceTypeParam", "InitScriptInfo", @@ -230,6 +236,16 @@ IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigDict, IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam, ) +from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters import ( + IngestionPipelineDefinitionWorkdayReportParameters, + IngestionPipelineDefinitionWorkdayReportParametersDict, + IngestionPipelineDefinitionWorkdayReportParametersParam, +) +from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters_query_key_value import ( + IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue, + IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict, + IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam, +) from databricks.bundles.pipelines._models.ingestion_source_type import ( IngestionSourceType, IngestionSourceTypeParam, diff --git a/experimental/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py b/experimental/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py index cad856056d..cf3947f4d4 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +++ b/experimental/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py @@ -39,6 +39,15 @@ class IngestionPipelineDefinition: Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server. """ + netsuite_jar_path: VariableOrOptional[str] = None + """ + :meta private: [EXPERIMENTAL] + + Netsuite only configuration. When the field is set for a netsuite connector, + the jar stored in the field will be validated and added to the classpath of + pipeline's cluster. + """ + objects: VariableOrList[IngestionConfig] = field(default_factory=list) """ Required. Settings specifying tables to replicate and the destination for the replicated tables. 
@@ -84,6 +93,15 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False): Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server. """ + netsuite_jar_path: VariableOrOptional[str] + """ + :meta private: [EXPERIMENTAL] + + Netsuite only configuration. When the field is set for a netsuite connector, + the jar stored in the field will be validated and added to the classpath of + pipeline's cluster. + """ + objects: VariableOrList[IngestionConfigParam] """ Required. Settings specifying tables to replicate and the destination for the replicated tables. diff --git a/experimental/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters.py b/experimental/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters.py new file mode 100644 index 0000000000..d48d68495d --- /dev/null +++ b/experimental/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters.py @@ -0,0 +1,95 @@ +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import ( + VariableOrDict, + VariableOrList, + VariableOrOptional, +) +from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters_query_key_value import ( + IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue, + IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam, +) + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class IngestionPipelineDefinitionWorkdayReportParameters: + """ + :meta private: [EXPERIMENTAL] + """ + + incremental: VariableOrOptional[bool] = None + """ + [DEPRECATED] (Optional) Marks the report as incremental. + This field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now + controlled by the `parameters` field. + """ + + parameters: VariableOrDict[str] = field(default_factory=dict) + """ + Parameters for the Workday report. Each key represents the parameter name (e.g., "start_date", "end_date"), + and the corresponding value is a SQL-like expression used to compute the parameter value at runtime. + Example: + { + "start_date": "{ coalesce(current_offset(), date(\"2025-02-01\")) }", + "end_date": "{ current_date() - INTERVAL 1 DAY }" + } + """ + + report_parameters: VariableOrList[ + IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue + ] = field(default_factory=list) + """ + [DEPRECATED] (Optional) Additional custom parameters for Workday Report + This field is deprecated and should not be used. Use `parameters` instead. + """ + + @classmethod + def from_dict( + cls, value: "IngestionPipelineDefinitionWorkdayReportParametersDict" + ) -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "IngestionPipelineDefinitionWorkdayReportParametersDict": + return _transform_to_json_value(self) # type:ignore + + +class IngestionPipelineDefinitionWorkdayReportParametersDict(TypedDict, total=False): + """""" + + incremental: VariableOrOptional[bool] + """ + [DEPRECATED] (Optional) Marks the report as incremental. + This field is deprecated and should not be used. Use `parameters` instead. 
The incremental behavior is now + controlled by the `parameters` field. + """ + + parameters: VariableOrDict[str] + """ + Parameters for the Workday report. Each key represents the parameter name (e.g., "start_date", "end_date"), + and the corresponding value is a SQL-like expression used to compute the parameter value at runtime. + Example: + { + "start_date": "{ coalesce(current_offset(), date(\"2025-02-01\")) }", + "end_date": "{ current_date() - INTERVAL 1 DAY }" + } + """ + + report_parameters: VariableOrList[ + IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam + ] + """ + [DEPRECATED] (Optional) Additional custom parameters for Workday Report + This field is deprecated and should not be used. Use `parameters` instead. + """ + + +IngestionPipelineDefinitionWorkdayReportParametersParam = ( + IngestionPipelineDefinitionWorkdayReportParametersDict + | IngestionPipelineDefinitionWorkdayReportParameters +) diff --git a/experimental/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters_query_key_value.py b/experimental/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters_query_key_value.py new file mode 100644 index 0000000000..2a24858d66 --- /dev/null +++ b/experimental/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters_query_key_value.py @@ -0,0 +1,70 @@ +from dataclasses import dataclass +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import VariableOrOptional + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue: + """ + :meta private: [EXPERIMENTAL] + + [DEPRECATED] + """ + + key: VariableOrOptional[str] = None + """ + Key for the report parameter, can be a column name or other metadata + """ + + value: VariableOrOptional[str] = None + """ + Value for the report parameter. + Possible values it can take are these sql functions: + 1. coalesce(current_offset(), date("YYYY-MM-DD")) -> if current_offset() is null, then the passed date, else current_offset() + 2. current_date() + 3. date_sub(current_date(), x) -> subtract x (some non-negative integer) days from current date + """ + + @classmethod + def from_dict( + cls, + value: "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict", + ) -> "Self": + return _transform(cls, value) + + def as_dict( + self, + ) -> "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict": + return _transform_to_json_value(self) # type:ignore + + +class IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict( + TypedDict, total=False +): + """""" + + key: VariableOrOptional[str] + """ + Key for the report parameter, can be a column name or other metadata + """ + + value: VariableOrOptional[str] + """ + Value for the report parameter. + Possible values it can take are these sql functions: + 1. coalesce(current_offset(), date("YYYY-MM-DD")) -> if current_offset() is null, then the passed date, else current_offset() + 2. current_date() + 3. 
date_sub(current_date(), x) -> subtract x (some non-negative integer) days from current date + """ + + +IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam = ( + IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict + | IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue +) diff --git a/experimental/python/databricks/bundles/pipelines/_models/ingestion_source_type.py b/experimental/python/databricks/bundles/pipelines/_models/ingestion_source_type.py index cb8ceca59e..d9a90a022e 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/ingestion_source_type.py +++ b/experimental/python/databricks/bundles/pipelines/_models/ingestion_source_type.py @@ -21,6 +21,7 @@ class IngestionSourceType(Enum): DYNAMICS365 = "DYNAMICS365" CONFLUENCE = "CONFLUENCE" META_MARKETING = "META_MARKETING" + FOREIGN_CATALOG = "FOREIGN_CATALOG" IngestionSourceTypeParam = ( @@ -43,6 +44,7 @@ class IngestionSourceType(Enum): "DYNAMICS365", "CONFLUENCE", "META_MARKETING", + "FOREIGN_CATALOG", ] | IngestionSourceType ) diff --git a/experimental/python/databricks/bundles/pipelines/_models/pipeline.py b/experimental/python/databricks/bundles/pipelines/_models/pipeline.py index 919238e743..0e698a64b8 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/pipeline.py +++ b/experimental/python/databricks/bundles/pipelines/_models/pipeline.py @@ -184,9 +184,6 @@ class Pipeline(Resource): """ run_as: VariableOrOptional[RunAs] = None - """ - :meta private: [EXPERIMENTAL] - """ schema: VariableOrOptional[str] = None """ @@ -347,9 +344,6 @@ class PipelineDict(TypedDict, total=False): """ run_as: VariableOrOptional[RunAsParam] - """ - :meta private: [EXPERIMENTAL] - """ schema: VariableOrOptional[str] """ diff --git a/experimental/python/databricks/bundles/pipelines/_models/run_as.py b/experimental/python/databricks/bundles/pipelines/_models/run_as.py index dadceecac7..b4d52af00a 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/run_as.py +++ b/experimental/python/databricks/bundles/pipelines/_models/run_as.py @@ -12,8 +12,6 @@ @dataclass(kw_only=True) class RunAs: """ - :meta private: [EXPERIMENTAL] - Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown. diff --git a/experimental/python/databricks/bundles/pipelines/_models/table_specific_config.py b/experimental/python/databricks/bundles/pipelines/_models/table_specific_config.py index ca39efaa13..7211a9a951 100644 --- a/experimental/python/databricks/bundles/pipelines/_models/table_specific_config.py +++ b/experimental/python/databricks/bundles/pipelines/_models/table_specific_config.py @@ -8,6 +8,10 @@ IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig, IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam, ) +from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters import ( + IngestionPipelineDefinitionWorkdayReportParameters, + IngestionPipelineDefinitionWorkdayReportParametersParam, +) from databricks.bundles.pipelines._models.table_specific_config_scd_type import ( TableSpecificConfigScdType, TableSpecificConfigScdTypeParam, @@ -71,6 +75,15 @@ class TableSpecificConfig: The column names specifying the logical order of events in the source data. 
Delta Live Tables uses this sequencing to handle change events that arrive out of order. """ + workday_report_parameters: VariableOrOptional[ + IngestionPipelineDefinitionWorkdayReportParameters + ] = None + """ + :meta private: [EXPERIMENTAL] + + (Optional) Additional custom parameters for Workday Report + """ + @classmethod def from_dict(cls, value: "TableSpecificConfigDict") -> "Self": return _transform(cls, value) @@ -132,5 +145,14 @@ class TableSpecificConfigDict(TypedDict, total=False): The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order. """ + workday_report_parameters: VariableOrOptional[ + IngestionPipelineDefinitionWorkdayReportParametersParam + ] + """ + :meta private: [EXPERIMENTAL] + + (Optional) Additional custom parameters for Workday Report + """ + TableSpecificConfigParam = TableSpecificConfigDict | TableSpecificConfig diff --git a/go.mod b/go.mod index 2bedbfc9e6..5fc80abac0 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/BurntSushi/toml v1.5.0 // MIT github.com/Masterminds/semver/v3 v3.4.0 // MIT github.com/briandowns/spinner v1.23.1 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.82.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.85.0 // Apache 2.0 github.com/fatih/color v1.18.0 // MIT github.com/google/uuid v1.6.0 // BSD-3-Clause github.com/gorilla/mux v1.8.1 // BSD 3-Clause @@ -29,7 +29,7 @@ require ( github.com/spf13/pflag v1.0.10 // BSD-3-Clause github.com/stretchr/testify v1.11.1 // MIT golang.org/x/crypto v0.42.0 // BSD-3-Clause - golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b + golang.org/x/exp v0.0.0-20250911091902-df9299821621 golang.org/x/mod v0.28.0 golang.org/x/oauth2 v0.31.0 golang.org/x/sync v0.17.0 @@ -41,9 +41,9 @@ require ( ) require ( - cloud.google.com/go/auth v0.16.2 // indirect + cloud.google.com/go/auth v0.16.5 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect - cloud.google.com/go/compute/metadata v0.7.0 // indirect + cloud.google.com/go/compute/metadata v0.8.4 // indirect github.com/ProtonMail/go-crypto v1.1.6 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/bitfield/gotestdox v0.2.2 // indirect @@ -62,7 +62,7 @@ require ( github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect github.com/google/yamlfmt v0.17.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect - github.com/googleapis/gax-go/v2 v2.14.2 // indirect + github.com/googleapis/gax-go/v2 v2.15.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-retryablehttp v0.7.7 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect @@ -71,18 +71,18 @@ require ( github.com/pmezard/go-difflib v1.0.0 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/zclconf/go-cty v1.16.4 // indirect - go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect - go.opentelemetry.io/otel v1.36.0 // indirect - go.opentelemetry.io/otel/metric v1.36.0 // indirect - go.opentelemetry.io/otel/trace v1.36.0 // indirect - golang.org/x/net v0.43.0 // indirect - golang.org/x/time v0.12.0 // indirect - golang.org/x/tools v0.36.0 // indirect - google.golang.org/api v0.238.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect - google.golang.org/grpc v1.73.0 // indirect - 
google.golang.org/protobuf v1.36.6 // indirect + go.opentelemetry.io/auto/sdk v1.2.1 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 // indirect + go.opentelemetry.io/otel v1.38.0 // indirect + go.opentelemetry.io/otel/metric v1.38.0 // indirect + go.opentelemetry.io/otel/trace v1.38.0 // indirect + golang.org/x/net v0.44.0 // indirect + golang.org/x/time v0.13.0 // indirect + golang.org/x/tools v0.37.0 // indirect + google.golang.org/api v0.249.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250922171735-9219d122eba9 // indirect + google.golang.org/grpc v1.75.1 // indirect + google.golang.org/protobuf v1.36.9 // indirect gotest.tools/gotestsum v1.12.1 // indirect ) diff --git a/go.sum b/go.sum index 3e32367cc6..522b2e7009 100644 --- a/go.sum +++ b/go.sum @@ -1,9 +1,9 @@ -cloud.google.com/go/auth v0.16.2 h1:QvBAGFPLrDeoiNjyfVunhQ10HKNYuOwZ5noee0M5df4= -cloud.google.com/go/auth v0.16.2/go.mod h1:sRBas2Y1fB1vZTdurouM0AzuYQBMZinrUYL8EufhtEA= +cloud.google.com/go/auth v0.16.5 h1:mFWNQ2FEVWAliEQWpAdH80omXFokmrnbDhUS9cBywsI= +cloud.google.com/go/auth v0.16.5/go.mod h1:utzRfHMP+Vv0mpOkTRQoWD2q3BatTOoWbA7gCc2dUhQ= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= -cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= -cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= +cloud.google.com/go/compute/metadata v0.8.4 h1:oXMa1VMQBVCyewMIOm3WQsnVd9FbKBtm8reqWRaXnHQ= +cloud.google.com/go/compute/metadata v0.8.4/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10= dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= @@ -33,8 +33,8 @@ github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZ github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= -github.com/databricks/databricks-sdk-go v0.82.0 h1:Amosg1Jp6M3w04jrvL+sIdMyPx7M+D1W/JtYJFwsGGA= -github.com/databricks/databricks-sdk-go v0.82.0/go.mod h1:xBtjeP9nq+6MgTewZW1EcbRkD7aDY9gZvcRPcwPhZjw= +github.com/databricks/databricks-sdk-go v0.85.0 h1:oDCioucFiIP3ioVqWkxFvN5jB+s4kxYvsFZYww/y4VI= +github.com/databricks/databricks-sdk-go v0.85.0/go.mod h1:hWoHnHbNLjPKiTm5K/7bcIv3J3Pkgo5x9pPzh8K3RVE= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -79,8 +79,8 @@ github.com/google/yamlfmt v0.17.0 h1:/tdp01rIlvLz3LgJ2NtMLnqgAadZm33P7GcPU680b+w github.com/google/yamlfmt v0.17.0/go.mod h1:gs0UEklJOYkUJ+OOCG0hg9n+DzucKDPlJElTUasVNK8= github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= -github.com/googleapis/gax-go/v2 v2.14.2 h1:eBLnkZ9635krYIPD+ag1USrOAI0Nr0QYF3+/3GqO0k0= 
-github.com/googleapis/gax-go/v2 v2.14.2/go.mod h1:ON64QhlJkhVtSqp4v1uaK92VyZ2gmvDQsweuyLV+8+w= +github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo= +github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc= github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= @@ -133,8 +133,8 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE= github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= -github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= -github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDjyw0ULyrTYWeN0UNCCkmCWfjPnIA2W6oviI= github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs= @@ -157,30 +157,30 @@ github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/zclconf/go-cty v1.16.4 h1:QGXaag7/7dCzb+odlGrgr+YmYZFaOCMW6DEpS+UD1eE= github.com/zclconf/go-cty v1.16.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= -go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= -go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q= -go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= -go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= -go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= -go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= -go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs= -go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY= -go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis= -go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod 
h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4= -go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= -go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 h1:RbKq8BG0FI8OiXhBfcRtqqHcZcka+gU3cskNuf05R18= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0/go.mod h1:h06DGIukJOevXaj/xrNjhi/2098RZzcLTbc0jDAUbsg= +go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= +go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= +go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= +go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= +go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= +go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= +go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= +go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= +go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= +go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= golang.org/x/crypto v0.42.0 h1:chiH31gIWm57EkTXpwnqf8qeuMUi0yekh6mT2AvFlqI= golang.org/x/crypto v0.42.0/go.mod h1:4+rDnOTJhQCx2q7/j6rAN5XDw8kPjeaXEUR2eL94ix8= -golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o= -golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8= +golang.org/x/exp v0.0.0-20250911091902-df9299821621 h1:2id6c1/gto0kaHYyrixvknJ8tUK/Qs5IsmBtrc+FtgU= +golang.org/x/exp v0.0.0-20250911091902-df9299821621/go.mod h1:TwQYMMnGpvZyc+JpB/UAuTNIsVJifOlSkrZkhcvpVUk= golang.org/x/mod v0.28.0 h1:gQBtGhjxykdjY9YhZpSlZIsbnaE2+PgjfLWUQTnoZ1U= golang.org/x/mod v0.28.0/go.mod h1:yfB/L0NOf/kmEbXjzCPOx1iK1fRutOydrCMsqRhEBxI= -golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= -golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= +golang.org/x/net v0.44.0 h1:evd8IRDyfNBMBTTY5XRF1vaZlD+EmWx6x8PkhR04H/I= +golang.org/x/net v0.44.0/go.mod h1:ECOoLqd5U3Lhyeyo/QDCEVQ4sNgYsqvCZ722XogGieY= golang.org/x/oauth2 v0.31.0 h1:8Fq0yVZLh4j4YA47vHKFTa9Ew5XIrCP8LC6UeNZnLxo= golang.org/x/oauth2 v0.31.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= @@ -197,19 +197,21 @@ golang.org/x/term v0.35.0 h1:bZBVKBudEyhRcajGcNc3jIfWPqV4y/Kt2XcoigOWtDQ= golang.org/x/term v0.35.0/go.mod h1:TPGtkTLesOwf2DE8CgVYiZinHAOuy5AYUYT1lENIZnA= golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk= golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4= -golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= -golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= -golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg= -golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s= +golang.org/x/time v0.13.0 h1:eUlYslOIt32DgYD6utsuUeHs4d7AsEYLuIAdg7FlYgI= +golang.org/x/time v0.13.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4= +golang.org/x/tools v0.37.0 h1:DVSRzp7FwePZW356yEAChSdNcQo6Nsp+fex1SUW09lE= 
+golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.238.0 h1:+EldkglWIg/pWjkq97sd+XxH7PxakNYoe/rkSTbnvOs= -google.golang.org/api v0.238.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 h1:fc6jSaCT0vBduLYZHYrBBNY4dsWuvgyff9noRNDdBeE= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= -google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok= -google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc= -google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= -google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/api v0.249.0 h1:0VrsWAKzIZi058aeq+I86uIXbNhm9GxSHpbmZ92a38w= +google.golang.org/api v0.249.0/go.mod h1:dGk9qyI0UYPwO/cjt2q06LG/EhUpwZGdAbYF14wHHrQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250922171735-9219d122eba9 h1:V1jCN2HBa8sySkR5vLcCSqJSTMv093Rw9EJefhQGP7M= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250922171735-9219d122eba9/go.mod h1:HSkG/KdJWusxU1F6CNrwNDjBMgisKxGnc5dAZfT0mjQ= +google.golang.org/grpc v1.75.1 h1:/ODCNEuf9VghjgO3rqLcfg8fiOP0nSluljWFlDxELLI= +google.golang.org/grpc v1.75.1/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ= +google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw= +google.golang.org/protobuf v1.36.9/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
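The model changes above add an experimental `workday_report_parameters` field to `TableSpecificConfig` and supersede the deprecated key/value `report_parameters` list with a `parameters` map whose values are SQL-like expressions evaluated at runtime. A minimal usage sketch follows, with two assumptions not confirmed by the diff: that the private `_models` module path is importable directly, and that `from_dict` (via `_transform`) recursively converts nested dicts into their dataclass forms. The expression strings are taken verbatim from the `parameters` docstring example in the diff.

from databricks.bundles.pipelines._models.table_specific_config import (
    TableSpecificConfig,  # module added to in this diff; direct _models import is an assumption
)

# Every TypedDict in this diff is declared total=False, so a plain dict
# carrying only the keys we need can stand in for the dataclass form.
table_config = TableSpecificConfig.from_dict(
    {
        "workday_report_parameters": {
            # Each value is a SQL-like expression computed at runtime,
            # per the `parameters` field docstring above.
            "parameters": {
                "start_date": '{ coalesce(current_offset(), date("2025-02-01")) }',
                "end_date": "{ current_date() - INTERVAL 1 DAY }",
            },
        },
    }
)

Because `IngestionPipelineDefinitionWorkdayReportParametersParam` is defined as a union of the TypedDict and dataclass forms, the nested value could equally be passed as an `IngestionPipelineDefinitionWorkdayReportParameters` instance rather than a dict.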