From b7e0bed38ff6ccb4666933c7e94b44b28ddd0f5f Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 11 Jun 2025 19:27:51 +0200 Subject: [PATCH 01/22] Generate required fields for DABs --- .../validation/generated/required_fields.go | 570 ++++++++++++++++++ bundle/internal/validation/main.go | 13 + bundle/internal/validation/required.go | 204 +++++++ libs/structdiff/structpath/path.go | 7 + 4 files changed, 794 insertions(+) create mode 100644 bundle/internal/validation/generated/required_fields.go create mode 100644 bundle/internal/validation/main.go create mode 100644 bundle/internal/validation/required.go diff --git a/bundle/internal/validation/generated/required_fields.go b/bundle/internal/validation/generated/required_fields.go new file mode 100644 index 0000000000..a4ce44ab77 --- /dev/null +++ b/bundle/internal/validation/generated/required_fields.go @@ -0,0 +1,570 @@ +package generated + +// THIS FILE IS AUTOGENERATED. +// DO NOT EDIT THIS FILE DIRECTLY. + +import ( + _ "github.com/databricks/cli/libs/dyn" +) + +// RequiredFields maps [dyn.Pattern] to required fields they should have. +var RequiredFields = map[string][]string{ + + "artifacts.*.files[*]": {"source"}, + + "bundle": {"name"}, + + "environments.*.artifacts.*.files[*]": {"source"}, + "environments.*.bundle": {"name"}, + "environments.*.permissions[*]": {"level"}, + "environments.*.resources.apps.*": {"source_code_path", "name"}, + "environments.*.resources.apps.*.permissions[*]": {"level"}, + "environments.*.resources.apps.*.resources[*]": {"name"}, + "environments.*.resources.apps.*.resources[*].job": {"id", "permission"}, + "environments.*.resources.apps.*.resources[*].secret": {"key", "permission", "scope"}, + "environments.*.resources.apps.*.resources[*].serving_endpoint": {"name", "permission"}, + "environments.*.resources.apps.*.resources[*].sql_warehouse": {"id", "permission"}, + "environments.*.resources.apps.*.resources[*].uc_securable": {"permission", "securable_full_name", "securable_type"}, + "environments.*.resources.clusters.*.cluster_log_conf.dbfs": {"destination"}, + "environments.*.resources.clusters.*.cluster_log_conf.s3": {"destination"}, + "environments.*.resources.clusters.*.cluster_log_conf.volumes": {"destination"}, + "environments.*.resources.clusters.*.init_scripts[*].abfss": {"destination"}, + "environments.*.resources.clusters.*.init_scripts[*].dbfs": {"destination"}, + "environments.*.resources.clusters.*.init_scripts[*].file": {"destination"}, + "environments.*.resources.clusters.*.init_scripts[*].gcs": {"destination"}, + "environments.*.resources.clusters.*.init_scripts[*].s3": {"destination"}, + "environments.*.resources.clusters.*.init_scripts[*].volumes": {"destination"}, + "environments.*.resources.clusters.*.init_scripts[*].workspace": {"destination"}, + "environments.*.resources.clusters.*.permissions[*]": {"level"}, + "environments.*.resources.clusters.*.workload_type": {"clients"}, + "environments.*.resources.dashboards.*.permissions[*]": {"level"}, + "environments.*.resources.experiments.*.permissions[*]": {"level"}, + "environments.*.resources.jobs.*.deployment": {"kind"}, + "environments.*.resources.jobs.*.environments[*]": {"environment_key"}, + "environments.*.resources.jobs.*.environments[*].spec": {"client"}, + "environments.*.resources.jobs.*.git_source": {"git_provider", "git_url"}, + "environments.*.resources.jobs.*.git_source.job_source": {"import_from_git_branch", "job_config_path"}, + "environments.*.resources.jobs.*.health.rules[*]": {"metric", "op", "value"}, + 
"environments.*.resources.jobs.*.job_clusters[*]": {"job_cluster_key", "new_cluster"}, + "environments.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.dbfs": {"destination"}, + "environments.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.s3": {"destination"}, + "environments.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.volumes": {"destination"}, + "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].abfss": {"destination"}, + "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].dbfs": {"destination"}, + "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].file": {"destination"}, + "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].gcs": {"destination"}, + "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].s3": {"destination"}, + "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].volumes": {"destination"}, + "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].workspace": {"destination"}, + "environments.*.resources.jobs.*.job_clusters[*].new_cluster.workload_type": {"clients"}, + "environments.*.resources.jobs.*.parameters[*]": {"default", "name"}, + "environments.*.resources.jobs.*.permissions[*]": {"level"}, + "environments.*.resources.jobs.*.queue": {"enabled"}, + "environments.*.resources.jobs.*.schedule": {"quartz_cron_expression", "timezone_id"}, + "environments.*.resources.jobs.*.tasks[*]": {"task_key"}, + "environments.*.resources.jobs.*.tasks[*].clean_rooms_notebook_task": {"clean_room_name", "notebook_name"}, + "environments.*.resources.jobs.*.tasks[*].condition_task": {"left", "op", "right"}, + "environments.*.resources.jobs.*.tasks[*].dbt_task": {"commands"}, + "environments.*.resources.jobs.*.tasks[*].depends_on[*]": {"task_key"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task": {"inputs", "task"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task": {"task_key"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.clean_rooms_notebook_task": {"clean_room_name", "notebook_name"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.condition_task": {"left", "op", "right"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.dbt_task": {"commands"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.depends_on[*]": {"task_key"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.for_each_task": {"inputs", "task"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task": {"dl_runtime_image"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task.compute": {"num_gpus"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.health.rules[*]": {"metric", "op", "value"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].cran": {"package"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].maven": {"coordinates"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].pypi": {"package"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.dbfs": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.s3": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.volumes": {"destination"}, + 
"environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].abfss": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].dbfs": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].file": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].gcs": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].s3": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].volumes": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].workspace": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.workload_type": {"clients"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.notebook_task": {"notebook_path"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.pipeline_task": {"pipeline_id"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.python_wheel_task": {"entry_point", "package_name"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.run_job_task": {"job_id"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.spark_python_task": {"python_file"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task": {"warehouse_id"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.alert": {"alert_id"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.dashboard": {"dashboard_id"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.file": {"path"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.query": {"query_id"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_failure[*]": {"id"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_start[*]": {"id"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, + "environments.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_success[*]": {"id"}, + "environments.*.resources.jobs.*.tasks[*].gen_ai_compute_task": {"dl_runtime_image"}, + "environments.*.resources.jobs.*.tasks[*].gen_ai_compute_task.compute": {"num_gpus"}, + "environments.*.resources.jobs.*.tasks[*].health.rules[*]": {"metric", "op", "value"}, + "environments.*.resources.jobs.*.tasks[*].libraries[*].cran": {"package"}, + "environments.*.resources.jobs.*.tasks[*].libraries[*].maven": {"coordinates"}, + "environments.*.resources.jobs.*.tasks[*].libraries[*].pypi": {"package"}, + "environments.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.dbfs": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.s3": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.volumes": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].abfss": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].dbfs": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].file": 
{"destination"}, + "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].gcs": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].s3": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].volumes": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].workspace": {"destination"}, + "environments.*.resources.jobs.*.tasks[*].new_cluster.workload_type": {"clients"}, + "environments.*.resources.jobs.*.tasks[*].notebook_task": {"notebook_path"}, + "environments.*.resources.jobs.*.tasks[*].pipeline_task": {"pipeline_id"}, + "environments.*.resources.jobs.*.tasks[*].python_wheel_task": {"entry_point", "package_name"}, + "environments.*.resources.jobs.*.tasks[*].run_job_task": {"job_id"}, + "environments.*.resources.jobs.*.tasks[*].spark_python_task": {"python_file"}, + "environments.*.resources.jobs.*.tasks[*].sql_task": {"warehouse_id"}, + "environments.*.resources.jobs.*.tasks[*].sql_task.alert": {"alert_id"}, + "environments.*.resources.jobs.*.tasks[*].sql_task.dashboard": {"dashboard_id"}, + "environments.*.resources.jobs.*.tasks[*].sql_task.file": {"path"}, + "environments.*.resources.jobs.*.tasks[*].sql_task.query": {"query_id"}, + "environments.*.resources.jobs.*.tasks[*].webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, + "environments.*.resources.jobs.*.tasks[*].webhook_notifications.on_failure[*]": {"id"}, + "environments.*.resources.jobs.*.tasks[*].webhook_notifications.on_start[*]": {"id"}, + "environments.*.resources.jobs.*.tasks[*].webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, + "environments.*.resources.jobs.*.tasks[*].webhook_notifications.on_success[*]": {"id"}, + "environments.*.resources.jobs.*.trigger.file_arrival": {"url"}, + "environments.*.resources.jobs.*.trigger.periodic": {"interval", "unit"}, + "environments.*.resources.jobs.*.webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, + "environments.*.resources.jobs.*.webhook_notifications.on_failure[*]": {"id"}, + "environments.*.resources.jobs.*.webhook_notifications.on_start[*]": {"id"}, + "environments.*.resources.jobs.*.webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, + "environments.*.resources.jobs.*.webhook_notifications.on_success[*]": {"id"}, + "environments.*.resources.model_serving_endpoints.*": {"name"}, + "environments.*.resources.model_serving_endpoints.*.ai_gateway.fallback_config": {"enabled"}, + "environments.*.resources.model_serving_endpoints.*.ai_gateway.rate_limits[*]": {"calls", "renewal_period"}, + "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model": {"name", "provider", "task"}, + "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.amazon_bedrock_config": {"aws_region", "bedrock_provider"}, + "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.custom_provider_config": {"custom_provider_url"}, + "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.custom_provider_config.api_key_auth": {"key"}, + "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.databricks_model_serving_config": {"databricks_workspace_url"}, + "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.google_cloud_vertex_ai_config": {"project_id", "region"}, + 
"environments.*.resources.model_serving_endpoints.*.config.served_models[*]": {"model_name", "model_version", "scale_to_zero_enabled"}, + "environments.*.resources.model_serving_endpoints.*.config.traffic_config.routes[*]": {"served_model_name", "traffic_percentage"}, + "environments.*.resources.model_serving_endpoints.*.permissions[*]": {"level"}, + "environments.*.resources.model_serving_endpoints.*.rate_limits[*]": {"calls", "renewal_period"}, + "environments.*.resources.model_serving_endpoints.*.tags[*]": {"key"}, + "environments.*.resources.models.*": {"name"}, + "environments.*.resources.models.*.permissions[*]": {"level"}, + "environments.*.resources.pipelines.*.clusters[*].autoscale": {"max_workers", "min_workers"}, + "environments.*.resources.pipelines.*.clusters[*].cluster_log_conf.dbfs": {"destination"}, + "environments.*.resources.pipelines.*.clusters[*].cluster_log_conf.s3": {"destination"}, + "environments.*.resources.pipelines.*.clusters[*].cluster_log_conf.volumes": {"destination"}, + "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].abfss": {"destination"}, + "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].dbfs": {"destination"}, + "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].file": {"destination"}, + "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].gcs": {"destination"}, + "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].s3": {"destination"}, + "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].volumes": {"destination"}, + "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].workspace": {"destination"}, + "environments.*.resources.pipelines.*.deployment": {"kind"}, + "environments.*.resources.pipelines.*.gateway_definition": {"connection_name", "gateway_storage_catalog", "gateway_storage_schema"}, + "environments.*.resources.pipelines.*.ingestion_definition.objects[*].report": {"destination_catalog", "destination_schema", "source_url"}, + "environments.*.resources.pipelines.*.ingestion_definition.objects[*].schema": {"destination_catalog", "destination_schema", "source_schema"}, + "environments.*.resources.pipelines.*.ingestion_definition.objects[*].table": {"destination_catalog", "destination_schema", "source_table"}, + "environments.*.resources.pipelines.*.libraries[*].maven": {"coordinates"}, + "environments.*.resources.pipelines.*.permissions[*]": {"level"}, + "environments.*.resources.pipelines.*.restart_window": {"start_hour"}, + "environments.*.resources.quality_monitors.*": {"table_name", "assets_dir", "output_schema_name"}, + "environments.*.resources.quality_monitors.*.custom_metrics[*]": {"definition", "input_columns", "name", "output_data_type", "type"}, + "environments.*.resources.quality_monitors.*.inference_log": {"granularities", "model_id_col", "prediction_col", "problem_type", "timestamp_col"}, + "environments.*.resources.quality_monitors.*.schedule": {"quartz_cron_expression", "timezone_id"}, + "environments.*.resources.quality_monitors.*.time_series": {"granularities", "timestamp_col"}, + "environments.*.resources.registered_models.*": {"catalog_name", "name", "schema_name"}, + "environments.*.resources.registered_models.*.grants[*]": {"privileges", "principal"}, + "environments.*.resources.schemas.*": {"catalog_name", "name"}, + "environments.*.resources.schemas.*.grants[*]": {"privileges", "principal"}, + "environments.*.resources.secret_scopes.*": {"name"}, + "environments.*.resources.secret_scopes.*.keyvault_metadata": {"dns_name", 
"resource_id"}, + "environments.*.resources.secret_scopes.*.permissions[*]": {"level"}, + "environments.*.resources.volumes.*": {"catalog_name", "name", "schema_name", "volume_type"}, + "environments.*.resources.volumes.*.grants[*]": {"privileges", "principal"}, + + "permissions[*]": {"level"}, + + "resources.apps.*": {"source_code_path", "name"}, + "resources.apps.*.permissions[*]": {"level"}, + "resources.apps.*.resources[*]": {"name"}, + "resources.apps.*.resources[*].job": {"id", "permission"}, + "resources.apps.*.resources[*].secret": {"key", "permission", "scope"}, + "resources.apps.*.resources[*].serving_endpoint": {"name", "permission"}, + "resources.apps.*.resources[*].sql_warehouse": {"id", "permission"}, + "resources.apps.*.resources[*].uc_securable": {"permission", "securable_full_name", "securable_type"}, + + "resources.clusters.*.cluster_log_conf.dbfs": {"destination"}, + "resources.clusters.*.cluster_log_conf.s3": {"destination"}, + "resources.clusters.*.cluster_log_conf.volumes": {"destination"}, + "resources.clusters.*.init_scripts[*].abfss": {"destination"}, + "resources.clusters.*.init_scripts[*].dbfs": {"destination"}, + "resources.clusters.*.init_scripts[*].file": {"destination"}, + "resources.clusters.*.init_scripts[*].gcs": {"destination"}, + "resources.clusters.*.init_scripts[*].s3": {"destination"}, + "resources.clusters.*.init_scripts[*].volumes": {"destination"}, + "resources.clusters.*.init_scripts[*].workspace": {"destination"}, + "resources.clusters.*.permissions[*]": {"level"}, + "resources.clusters.*.workload_type": {"clients"}, + + "resources.dashboards.*.permissions[*]": {"level"}, + + "resources.experiments.*.permissions[*]": {"level"}, + + "resources.jobs.*.deployment": {"kind"}, + "resources.jobs.*.environments[*]": {"environment_key"}, + "resources.jobs.*.environments[*].spec": {"client"}, + "resources.jobs.*.git_source": {"git_provider", "git_url"}, + "resources.jobs.*.git_source.job_source": {"import_from_git_branch", "job_config_path"}, + "resources.jobs.*.health.rules[*]": {"metric", "op", "value"}, + "resources.jobs.*.job_clusters[*]": {"job_cluster_key", "new_cluster"}, + "resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.dbfs": {"destination"}, + "resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.s3": {"destination"}, + "resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.volumes": {"destination"}, + "resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].abfss": {"destination"}, + "resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].dbfs": {"destination"}, + "resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].file": {"destination"}, + "resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].gcs": {"destination"}, + "resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].s3": {"destination"}, + "resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].volumes": {"destination"}, + "resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].workspace": {"destination"}, + "resources.jobs.*.job_clusters[*].new_cluster.workload_type": {"clients"}, + "resources.jobs.*.parameters[*]": {"default", "name"}, + "resources.jobs.*.permissions[*]": {"level"}, + "resources.jobs.*.queue": {"enabled"}, + "resources.jobs.*.schedule": {"quartz_cron_expression", "timezone_id"}, + "resources.jobs.*.tasks[*]": {"task_key"}, + "resources.jobs.*.tasks[*].clean_rooms_notebook_task": {"clean_room_name", "notebook_name"}, + "resources.jobs.*.tasks[*].condition_task": {"left", "op", "right"}, + 
"resources.jobs.*.tasks[*].dbt_task": {"commands"}, + "resources.jobs.*.tasks[*].depends_on[*]": {"task_key"}, + "resources.jobs.*.tasks[*].for_each_task": {"inputs", "task"}, + "resources.jobs.*.tasks[*].for_each_task.task": {"task_key"}, + "resources.jobs.*.tasks[*].for_each_task.task.clean_rooms_notebook_task": {"clean_room_name", "notebook_name"}, + "resources.jobs.*.tasks[*].for_each_task.task.condition_task": {"left", "op", "right"}, + "resources.jobs.*.tasks[*].for_each_task.task.dbt_task": {"commands"}, + "resources.jobs.*.tasks[*].for_each_task.task.depends_on[*]": {"task_key"}, + "resources.jobs.*.tasks[*].for_each_task.task.for_each_task": {"inputs", "task"}, + "resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task": {"dl_runtime_image"}, + "resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task.compute": {"num_gpus"}, + "resources.jobs.*.tasks[*].for_each_task.task.health.rules[*]": {"metric", "op", "value"}, + "resources.jobs.*.tasks[*].for_each_task.task.libraries[*].cran": {"package"}, + "resources.jobs.*.tasks[*].for_each_task.task.libraries[*].maven": {"coordinates"}, + "resources.jobs.*.tasks[*].for_each_task.task.libraries[*].pypi": {"package"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.dbfs": {"destination"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.s3": {"destination"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.volumes": {"destination"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].abfss": {"destination"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].dbfs": {"destination"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].file": {"destination"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].gcs": {"destination"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].s3": {"destination"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].volumes": {"destination"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].workspace": {"destination"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.workload_type": {"clients"}, + "resources.jobs.*.tasks[*].for_each_task.task.notebook_task": {"notebook_path"}, + "resources.jobs.*.tasks[*].for_each_task.task.pipeline_task": {"pipeline_id"}, + "resources.jobs.*.tasks[*].for_each_task.task.python_wheel_task": {"entry_point", "package_name"}, + "resources.jobs.*.tasks[*].for_each_task.task.run_job_task": {"job_id"}, + "resources.jobs.*.tasks[*].for_each_task.task.spark_python_task": {"python_file"}, + "resources.jobs.*.tasks[*].for_each_task.task.sql_task": {"warehouse_id"}, + "resources.jobs.*.tasks[*].for_each_task.task.sql_task.alert": {"alert_id"}, + "resources.jobs.*.tasks[*].for_each_task.task.sql_task.dashboard": {"dashboard_id"}, + "resources.jobs.*.tasks[*].for_each_task.task.sql_task.file": {"path"}, + "resources.jobs.*.tasks[*].for_each_task.task.sql_task.query": {"query_id"}, + "resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, + "resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_failure[*]": {"id"}, + "resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_start[*]": {"id"}, + "resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, + 
"resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_success[*]": {"id"}, + "resources.jobs.*.tasks[*].gen_ai_compute_task": {"dl_runtime_image"}, + "resources.jobs.*.tasks[*].gen_ai_compute_task.compute": {"num_gpus"}, + "resources.jobs.*.tasks[*].health.rules[*]": {"metric", "op", "value"}, + "resources.jobs.*.tasks[*].libraries[*].cran": {"package"}, + "resources.jobs.*.tasks[*].libraries[*].maven": {"coordinates"}, + "resources.jobs.*.tasks[*].libraries[*].pypi": {"package"}, + "resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.dbfs": {"destination"}, + "resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.s3": {"destination"}, + "resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.volumes": {"destination"}, + "resources.jobs.*.tasks[*].new_cluster.init_scripts[*].abfss": {"destination"}, + "resources.jobs.*.tasks[*].new_cluster.init_scripts[*].dbfs": {"destination"}, + "resources.jobs.*.tasks[*].new_cluster.init_scripts[*].file": {"destination"}, + "resources.jobs.*.tasks[*].new_cluster.init_scripts[*].gcs": {"destination"}, + "resources.jobs.*.tasks[*].new_cluster.init_scripts[*].s3": {"destination"}, + "resources.jobs.*.tasks[*].new_cluster.init_scripts[*].volumes": {"destination"}, + "resources.jobs.*.tasks[*].new_cluster.init_scripts[*].workspace": {"destination"}, + "resources.jobs.*.tasks[*].new_cluster.workload_type": {"clients"}, + "resources.jobs.*.tasks[*].notebook_task": {"notebook_path"}, + "resources.jobs.*.tasks[*].pipeline_task": {"pipeline_id"}, + "resources.jobs.*.tasks[*].python_wheel_task": {"entry_point", "package_name"}, + "resources.jobs.*.tasks[*].run_job_task": {"job_id"}, + "resources.jobs.*.tasks[*].spark_python_task": {"python_file"}, + "resources.jobs.*.tasks[*].sql_task": {"warehouse_id"}, + "resources.jobs.*.tasks[*].sql_task.alert": {"alert_id"}, + "resources.jobs.*.tasks[*].sql_task.dashboard": {"dashboard_id"}, + "resources.jobs.*.tasks[*].sql_task.file": {"path"}, + "resources.jobs.*.tasks[*].sql_task.query": {"query_id"}, + "resources.jobs.*.tasks[*].webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, + "resources.jobs.*.tasks[*].webhook_notifications.on_failure[*]": {"id"}, + "resources.jobs.*.tasks[*].webhook_notifications.on_start[*]": {"id"}, + "resources.jobs.*.tasks[*].webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, + "resources.jobs.*.tasks[*].webhook_notifications.on_success[*]": {"id"}, + "resources.jobs.*.trigger.file_arrival": {"url"}, + "resources.jobs.*.trigger.periodic": {"interval", "unit"}, + "resources.jobs.*.webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, + "resources.jobs.*.webhook_notifications.on_failure[*]": {"id"}, + "resources.jobs.*.webhook_notifications.on_start[*]": {"id"}, + "resources.jobs.*.webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, + "resources.jobs.*.webhook_notifications.on_success[*]": {"id"}, + + "resources.model_serving_endpoints.*": {"name"}, + "resources.model_serving_endpoints.*.ai_gateway.fallback_config": {"enabled"}, + "resources.model_serving_endpoints.*.ai_gateway.rate_limits[*]": {"calls", "renewal_period"}, + "resources.model_serving_endpoints.*.config.served_entities[*].external_model": {"name", "provider", "task"}, + "resources.model_serving_endpoints.*.config.served_entities[*].external_model.amazon_bedrock_config": {"aws_region", "bedrock_provider"}, + "resources.model_serving_endpoints.*.config.served_entities[*].external_model.custom_provider_config": {"custom_provider_url"}, + 
"resources.model_serving_endpoints.*.config.served_entities[*].external_model.custom_provider_config.api_key_auth": {"key"}, + "resources.model_serving_endpoints.*.config.served_entities[*].external_model.databricks_model_serving_config": {"databricks_workspace_url"}, + "resources.model_serving_endpoints.*.config.served_entities[*].external_model.google_cloud_vertex_ai_config": {"project_id", "region"}, + "resources.model_serving_endpoints.*.config.served_models[*]": {"model_name", "model_version", "scale_to_zero_enabled"}, + "resources.model_serving_endpoints.*.config.traffic_config.routes[*]": {"served_model_name", "traffic_percentage"}, + "resources.model_serving_endpoints.*.permissions[*]": {"level"}, + "resources.model_serving_endpoints.*.rate_limits[*]": {"calls", "renewal_period"}, + "resources.model_serving_endpoints.*.tags[*]": {"key"}, + + "resources.models.*": {"name"}, + "resources.models.*.permissions[*]": {"level"}, + + "resources.pipelines.*.clusters[*].autoscale": {"max_workers", "min_workers"}, + "resources.pipelines.*.clusters[*].cluster_log_conf.dbfs": {"destination"}, + "resources.pipelines.*.clusters[*].cluster_log_conf.s3": {"destination"}, + "resources.pipelines.*.clusters[*].cluster_log_conf.volumes": {"destination"}, + "resources.pipelines.*.clusters[*].init_scripts[*].abfss": {"destination"}, + "resources.pipelines.*.clusters[*].init_scripts[*].dbfs": {"destination"}, + "resources.pipelines.*.clusters[*].init_scripts[*].file": {"destination"}, + "resources.pipelines.*.clusters[*].init_scripts[*].gcs": {"destination"}, + "resources.pipelines.*.clusters[*].init_scripts[*].s3": {"destination"}, + "resources.pipelines.*.clusters[*].init_scripts[*].volumes": {"destination"}, + "resources.pipelines.*.clusters[*].init_scripts[*].workspace": {"destination"}, + "resources.pipelines.*.deployment": {"kind"}, + "resources.pipelines.*.gateway_definition": {"connection_name", "gateway_storage_catalog", "gateway_storage_schema"}, + "resources.pipelines.*.ingestion_definition.objects[*].report": {"destination_catalog", "destination_schema", "source_url"}, + "resources.pipelines.*.ingestion_definition.objects[*].schema": {"destination_catalog", "destination_schema", "source_schema"}, + "resources.pipelines.*.ingestion_definition.objects[*].table": {"destination_catalog", "destination_schema", "source_table"}, + "resources.pipelines.*.libraries[*].maven": {"coordinates"}, + "resources.pipelines.*.permissions[*]": {"level"}, + "resources.pipelines.*.restart_window": {"start_hour"}, + + "resources.quality_monitors.*": {"table_name", "assets_dir", "output_schema_name"}, + "resources.quality_monitors.*.custom_metrics[*]": {"definition", "input_columns", "name", "output_data_type", "type"}, + "resources.quality_monitors.*.inference_log": {"granularities", "model_id_col", "prediction_col", "problem_type", "timestamp_col"}, + "resources.quality_monitors.*.schedule": {"quartz_cron_expression", "timezone_id"}, + "resources.quality_monitors.*.time_series": {"granularities", "timestamp_col"}, + + "resources.registered_models.*": {"catalog_name", "name", "schema_name"}, + "resources.registered_models.*.grants[*]": {"privileges", "principal"}, + + "resources.schemas.*": {"catalog_name", "name"}, + "resources.schemas.*.grants[*]": {"privileges", "principal"}, + + "resources.secret_scopes.*": {"name"}, + "resources.secret_scopes.*.keyvault_metadata": {"dns_name", "resource_id"}, + "resources.secret_scopes.*.permissions[*]": {"level"}, + + "resources.volumes.*": {"catalog_name", "name", 
"schema_name", "volume_type"}, + "resources.volumes.*.grants[*]": {"privileges", "principal"}, + + "targets.*.artifacts.*.files[*]": {"source"}, + + "targets.*.bundle": {"name"}, + + "targets.*.permissions[*]": {"level"}, + + "targets.*.resources.apps.*": {"source_code_path", "name"}, + "targets.*.resources.apps.*.permissions[*]": {"level"}, + "targets.*.resources.apps.*.resources[*]": {"name"}, + "targets.*.resources.apps.*.resources[*].job": {"id", "permission"}, + "targets.*.resources.apps.*.resources[*].secret": {"key", "permission", "scope"}, + "targets.*.resources.apps.*.resources[*].serving_endpoint": {"name", "permission"}, + "targets.*.resources.apps.*.resources[*].sql_warehouse": {"id", "permission"}, + "targets.*.resources.apps.*.resources[*].uc_securable": {"permission", "securable_full_name", "securable_type"}, + "targets.*.resources.clusters.*.cluster_log_conf.dbfs": {"destination"}, + "targets.*.resources.clusters.*.cluster_log_conf.s3": {"destination"}, + "targets.*.resources.clusters.*.cluster_log_conf.volumes": {"destination"}, + "targets.*.resources.clusters.*.init_scripts[*].abfss": {"destination"}, + "targets.*.resources.clusters.*.init_scripts[*].dbfs": {"destination"}, + "targets.*.resources.clusters.*.init_scripts[*].file": {"destination"}, + "targets.*.resources.clusters.*.init_scripts[*].gcs": {"destination"}, + "targets.*.resources.clusters.*.init_scripts[*].s3": {"destination"}, + "targets.*.resources.clusters.*.init_scripts[*].volumes": {"destination"}, + "targets.*.resources.clusters.*.init_scripts[*].workspace": {"destination"}, + "targets.*.resources.clusters.*.permissions[*]": {"level"}, + "targets.*.resources.clusters.*.workload_type": {"clients"}, + "targets.*.resources.dashboards.*.permissions[*]": {"level"}, + "targets.*.resources.experiments.*.permissions[*]": {"level"}, + "targets.*.resources.jobs.*.deployment": {"kind"}, + "targets.*.resources.jobs.*.environments[*]": {"environment_key"}, + "targets.*.resources.jobs.*.environments[*].spec": {"client"}, + "targets.*.resources.jobs.*.git_source": {"git_provider", "git_url"}, + "targets.*.resources.jobs.*.git_source.job_source": {"import_from_git_branch", "job_config_path"}, + "targets.*.resources.jobs.*.health.rules[*]": {"metric", "op", "value"}, + "targets.*.resources.jobs.*.job_clusters[*]": {"job_cluster_key", "new_cluster"}, + "targets.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.dbfs": {"destination"}, + "targets.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.s3": {"destination"}, + "targets.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.volumes": {"destination"}, + "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].abfss": {"destination"}, + "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].dbfs": {"destination"}, + "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].file": {"destination"}, + "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].gcs": {"destination"}, + "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].s3": {"destination"}, + "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].volumes": {"destination"}, + "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].workspace": {"destination"}, + "targets.*.resources.jobs.*.job_clusters[*].new_cluster.workload_type": {"clients"}, + "targets.*.resources.jobs.*.parameters[*]": {"default", "name"}, + "targets.*.resources.jobs.*.permissions[*]": 
{"level"}, + "targets.*.resources.jobs.*.queue": {"enabled"}, + "targets.*.resources.jobs.*.schedule": {"quartz_cron_expression", "timezone_id"}, + "targets.*.resources.jobs.*.tasks[*]": {"task_key"}, + "targets.*.resources.jobs.*.tasks[*].clean_rooms_notebook_task": {"clean_room_name", "notebook_name"}, + "targets.*.resources.jobs.*.tasks[*].condition_task": {"left", "op", "right"}, + "targets.*.resources.jobs.*.tasks[*].dbt_task": {"commands"}, + "targets.*.resources.jobs.*.tasks[*].depends_on[*]": {"task_key"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task": {"inputs", "task"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task": {"task_key"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.clean_rooms_notebook_task": {"clean_room_name", "notebook_name"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.condition_task": {"left", "op", "right"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.dbt_task": {"commands"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.depends_on[*]": {"task_key"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.for_each_task": {"inputs", "task"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task": {"dl_runtime_image"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task.compute": {"num_gpus"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.health.rules[*]": {"metric", "op", "value"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].cran": {"package"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].maven": {"coordinates"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].pypi": {"package"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.dbfs": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.s3": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.volumes": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].abfss": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].dbfs": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].file": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].gcs": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].s3": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].volumes": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].workspace": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.workload_type": {"clients"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.notebook_task": {"notebook_path"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.pipeline_task": {"pipeline_id"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.python_wheel_task": {"entry_point", "package_name"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.run_job_task": {"job_id"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.spark_python_task": {"python_file"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task": {"warehouse_id"}, + 
"targets.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.alert": {"alert_id"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.dashboard": {"dashboard_id"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.file": {"path"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.query": {"query_id"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_failure[*]": {"id"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_start[*]": {"id"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, + "targets.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_success[*]": {"id"}, + "targets.*.resources.jobs.*.tasks[*].gen_ai_compute_task": {"dl_runtime_image"}, + "targets.*.resources.jobs.*.tasks[*].gen_ai_compute_task.compute": {"num_gpus"}, + "targets.*.resources.jobs.*.tasks[*].health.rules[*]": {"metric", "op", "value"}, + "targets.*.resources.jobs.*.tasks[*].libraries[*].cran": {"package"}, + "targets.*.resources.jobs.*.tasks[*].libraries[*].maven": {"coordinates"}, + "targets.*.resources.jobs.*.tasks[*].libraries[*].pypi": {"package"}, + "targets.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.dbfs": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.s3": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.volumes": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].abfss": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].dbfs": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].file": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].gcs": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].s3": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].volumes": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].workspace": {"destination"}, + "targets.*.resources.jobs.*.tasks[*].new_cluster.workload_type": {"clients"}, + "targets.*.resources.jobs.*.tasks[*].notebook_task": {"notebook_path"}, + "targets.*.resources.jobs.*.tasks[*].pipeline_task": {"pipeline_id"}, + "targets.*.resources.jobs.*.tasks[*].python_wheel_task": {"entry_point", "package_name"}, + "targets.*.resources.jobs.*.tasks[*].run_job_task": {"job_id"}, + "targets.*.resources.jobs.*.tasks[*].spark_python_task": {"python_file"}, + "targets.*.resources.jobs.*.tasks[*].sql_task": {"warehouse_id"}, + "targets.*.resources.jobs.*.tasks[*].sql_task.alert": {"alert_id"}, + "targets.*.resources.jobs.*.tasks[*].sql_task.dashboard": {"dashboard_id"}, + "targets.*.resources.jobs.*.tasks[*].sql_task.file": {"path"}, + "targets.*.resources.jobs.*.tasks[*].sql_task.query": {"query_id"}, + "targets.*.resources.jobs.*.tasks[*].webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, + "targets.*.resources.jobs.*.tasks[*].webhook_notifications.on_failure[*]": {"id"}, + "targets.*.resources.jobs.*.tasks[*].webhook_notifications.on_start[*]": {"id"}, + "targets.*.resources.jobs.*.tasks[*].webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, + 
"targets.*.resources.jobs.*.tasks[*].webhook_notifications.on_success[*]": {"id"}, + "targets.*.resources.jobs.*.trigger.file_arrival": {"url"}, + "targets.*.resources.jobs.*.trigger.periodic": {"interval", "unit"}, + "targets.*.resources.jobs.*.webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, + "targets.*.resources.jobs.*.webhook_notifications.on_failure[*]": {"id"}, + "targets.*.resources.jobs.*.webhook_notifications.on_start[*]": {"id"}, + "targets.*.resources.jobs.*.webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, + "targets.*.resources.jobs.*.webhook_notifications.on_success[*]": {"id"}, + "targets.*.resources.model_serving_endpoints.*": {"name"}, + "targets.*.resources.model_serving_endpoints.*.ai_gateway.fallback_config": {"enabled"}, + "targets.*.resources.model_serving_endpoints.*.ai_gateway.rate_limits[*]": {"calls", "renewal_period"}, + "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model": {"name", "provider", "task"}, + "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.amazon_bedrock_config": {"aws_region", "bedrock_provider"}, + "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.custom_provider_config": {"custom_provider_url"}, + "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.custom_provider_config.api_key_auth": {"key"}, + "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.databricks_model_serving_config": {"databricks_workspace_url"}, + "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.google_cloud_vertex_ai_config": {"project_id", "region"}, + "targets.*.resources.model_serving_endpoints.*.config.served_models[*]": {"model_name", "model_version", "scale_to_zero_enabled"}, + "targets.*.resources.model_serving_endpoints.*.config.traffic_config.routes[*]": {"served_model_name", "traffic_percentage"}, + "targets.*.resources.model_serving_endpoints.*.permissions[*]": {"level"}, + "targets.*.resources.model_serving_endpoints.*.rate_limits[*]": {"calls", "renewal_period"}, + "targets.*.resources.model_serving_endpoints.*.tags[*]": {"key"}, + "targets.*.resources.models.*": {"name"}, + "targets.*.resources.models.*.permissions[*]": {"level"}, + "targets.*.resources.pipelines.*.clusters[*].autoscale": {"max_workers", "min_workers"}, + "targets.*.resources.pipelines.*.clusters[*].cluster_log_conf.dbfs": {"destination"}, + "targets.*.resources.pipelines.*.clusters[*].cluster_log_conf.s3": {"destination"}, + "targets.*.resources.pipelines.*.clusters[*].cluster_log_conf.volumes": {"destination"}, + "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].abfss": {"destination"}, + "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].dbfs": {"destination"}, + "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].file": {"destination"}, + "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].gcs": {"destination"}, + "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].s3": {"destination"}, + "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].volumes": {"destination"}, + "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].workspace": {"destination"}, + "targets.*.resources.pipelines.*.deployment": {"kind"}, + "targets.*.resources.pipelines.*.gateway_definition": {"connection_name", "gateway_storage_catalog", "gateway_storage_schema"}, + 
"targets.*.resources.pipelines.*.ingestion_definition.objects[*].report": {"destination_catalog", "destination_schema", "source_url"}, + "targets.*.resources.pipelines.*.ingestion_definition.objects[*].schema": {"destination_catalog", "destination_schema", "source_schema"}, + "targets.*.resources.pipelines.*.ingestion_definition.objects[*].table": {"destination_catalog", "destination_schema", "source_table"}, + "targets.*.resources.pipelines.*.libraries[*].maven": {"coordinates"}, + "targets.*.resources.pipelines.*.permissions[*]": {"level"}, + "targets.*.resources.pipelines.*.restart_window": {"start_hour"}, + "targets.*.resources.quality_monitors.*": {"table_name", "assets_dir", "output_schema_name"}, + "targets.*.resources.quality_monitors.*.custom_metrics[*]": {"definition", "input_columns", "name", "output_data_type", "type"}, + "targets.*.resources.quality_monitors.*.inference_log": {"granularities", "model_id_col", "prediction_col", "problem_type", "timestamp_col"}, + "targets.*.resources.quality_monitors.*.schedule": {"quartz_cron_expression", "timezone_id"}, + "targets.*.resources.quality_monitors.*.time_series": {"granularities", "timestamp_col"}, + "targets.*.resources.registered_models.*": {"catalog_name", "name", "schema_name"}, + "targets.*.resources.registered_models.*.grants[*]": {"privileges", "principal"}, + "targets.*.resources.schemas.*": {"catalog_name", "name"}, + "targets.*.resources.schemas.*.grants[*]": {"privileges", "principal"}, + "targets.*.resources.secret_scopes.*": {"name"}, + "targets.*.resources.secret_scopes.*.keyvault_metadata": {"dns_name", "resource_id"}, + "targets.*.resources.secret_scopes.*.permissions[*]": {"level"}, + "targets.*.resources.volumes.*": {"catalog_name", "name", "schema_name", "volume_type"}, + "targets.*.resources.volumes.*.grants[*]": {"privileges", "principal"}, +} diff --git a/bundle/internal/validation/main.go b/bundle/internal/validation/main.go new file mode 100644 index 0000000000..8f5d64a393 --- /dev/null +++ b/bundle/internal/validation/main.go @@ -0,0 +1,13 @@ +package main + +import ( + "log" +) + +// This package is meant to be run from the root of the CLI repo. +func main() { + err := generateRequiredFields("bundle/internal/validation/generated") + if err != nil { + log.Fatalf("Error generating code: %v", err) + } +} diff --git a/bundle/internal/validation/required.go b/bundle/internal/validation/required.go new file mode 100644 index 0000000000..8163432bcc --- /dev/null +++ b/bundle/internal/validation/required.go @@ -0,0 +1,204 @@ +package main + +import ( + "bytes" + "fmt" + "os" + "path/filepath" + "reflect" + "sort" + "strings" + "text/template" + + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/structdiff/structpath" + "github.com/databricks/cli/libs/structwalk" +) + +// PatternInfo represents validation requirements for a specific configuration pattern +type PatternInfo struct { + // The pattern for which the fields in Required are applicable. + // This is a string representation of [dyn.Parent]. + Parent string + + // List of required fields that should be set for every path in the + // config tree that matches the pattern. This field be a string of the + // form `{field1, field2, ...}`. 
+ RequiredFields string +} + +// formatRequiredFields formats a list of field names into a Go slice literal string +func formatRequiredFields(fields []string) string { + if len(fields) == 0 { + return "{}" + } + + var quoted []string + for _, field := range fields { + quoted = append(quoted, fmt.Sprintf("%q", field)) + } + + return "{" + strings.Join(quoted, ", ") + "}" +} + +// extractRequiredFields walks through a struct type and extracts required field patterns +func extractRequiredFields(typ reflect.Type) []PatternInfo { + fieldsByPattern := make(map[string][]string) + + structwalk.WalkType(typ, func(path *structpath.PathNode, _ reflect.Type) bool { + if path == nil { + return true + } + + // Do not perform required validation on fields that are deprecated, internal, or readonly. + bundleTag := path.BundleTag() + if bundleTag.Internal() || bundleTag.ReadOnly() { + return false + } + + // The "omitempty" tag indicates the field is optional in bundle config. + if path.JSONTag().OmitEmpty() { + return true + } + + // Only perform required validation for struct fields. + field, ok := path.Field() + if !ok { + return true + } + + parentPath := path.Parent().DynPath() + fieldsByPattern[parentPath] = append(fieldsByPattern[parentPath], field) + return true + }) + + return buildPatternInfos(fieldsByPattern) +} + +// buildPatternInfos converts the field map to PatternInfo slice +func buildPatternInfos(fieldsByPattern map[string][]string) []PatternInfo { + patterns := make([]PatternInfo, 0, len(fieldsByPattern)) + + for parentPath, fields := range fieldsByPattern { + patterns = append(patterns, PatternInfo{ + Parent: parentPath, + RequiredFields: formatRequiredFields(fields), + }) + } + + return patterns +} + +// getGroupingKey determines the grouping key for organizing patterns +func getGroupingKey(parentPath string) string { + parts := strings.Split(parentPath, ".") + + switch parts[0] { + case "resources": + // Group resources by their type (e.g., "resources.jobs") + if len(parts) > 1 { + return parts[0] + "." 
+ parts[1] + } + case "targets": + // Group target overrides by their first 3 keys + if len(parts) > 2 { + return strings.Join(parts[:3], ".") + } + } + + // Use the top level key for other fields + return parts[0] +} + +// groupPatternsByKey groups patterns by their logical grouping key +func groupPatternsByKey(patterns []PatternInfo) map[string][]PatternInfo { + groupedPatterns := make(map[string][]PatternInfo) + + for _, pattern := range patterns { + key := getGroupingKey(pattern.Parent) + groupedPatterns[key] = append(groupedPatterns[key], pattern) + } + + return groupedPatterns +} + +// sortGroupedPatterns sorts patterns within each group and returns them as a sorted slice +func sortGroupedPatterns(groupedPatterns map[string][]PatternInfo) [][]PatternInfo { + // Get sorted group keys + groupKeys := make([]string, 0, len(groupedPatterns)) + for key := range groupedPatterns { + groupKeys = append(groupKeys, key) + } + sort.Strings(groupKeys) + + // Build sorted result + result := make([][]PatternInfo, 0, len(groupKeys)) + for _, key := range groupKeys { + patterns := groupedPatterns[key] + + // Sort patterns within each group by parent path + sort.Slice(patterns, func(i, j int) bool { + return patterns[i].Parent < patterns[j].Parent + }) + + result = append(result, patterns) + } + + return result +} + +// RequiredFields returns grouped required field patterns for validation +func requiredFields() [][]PatternInfo { + patterns := extractRequiredFields(reflect.TypeOf(config.Root{})) + groupedPatterns := groupPatternsByKey(patterns) + return sortGroupedPatterns(groupedPatterns) +} + +// Generate creates a Go source file with required field validation rules +func generateRequiredFields(outPath string) error { + requiredFields := requiredFields() + + // Ensure output directory exists + if err := os.MkdirAll(outPath, 0o755); err != nil { + return fmt.Errorf("failed to create output directory: %w", err) + } + + // Parse and execute template + tmpl, err := template.New("validation").Parse(validationTemplate) + if err != nil { + return fmt.Errorf("failed to parse template: %w", err) + } + + var generatedCode bytes.Buffer + if err := tmpl.Execute(&generatedCode, requiredFields); err != nil { + return fmt.Errorf("failed to execute template: %w", err) + } + + // Write generated code to file + filePath := filepath.Join(outPath, "required_fields.go") + if err := os.WriteFile(filePath, generatedCode.Bytes(), 0o644); err != nil { + return fmt.Errorf("failed to write generated code: %w", err) + } + + return nil +} + +// validationTemplate is the Go text template for generating the validation map +const validationTemplate = `package generated + +// THIS FILE IS AUTOGENERATED. +// DO NOT EDIT THIS FILE DIRECTLY. + +import ( + _ "github.com/databricks/cli/libs/dyn" +) + +// RequiredFields maps [dyn.Pattern] to required fields they should have. +var RequiredFields = map[string][]string{ +{{- range . }} +{{ range . }} + "{{ .Parent }}": {{ .RequiredFields }}, +{{- end }} +{{- end }} +} +` diff --git a/libs/structdiff/structpath/path.go b/libs/structdiff/structpath/path.go index 29f3b59577..07c06d256a 100644 --- a/libs/structdiff/structpath/path.go +++ b/libs/structdiff/structpath/path.go @@ -97,6 +97,13 @@ func (p *PathNode) Field() (string, bool) { return "", false } +func (p *PathNode) Parent() *PathNode { + if p == nil { + return nil + } + return p.prev +} + // NewIndex creates a new PathNode for an array/slice index. 
func NewIndex(prev *PathNode, index int) *PathNode { if index < 0 { From 4c327c55828d1bf561634087a88fc0319c73eaef Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 11 Jun 2025 19:31:34 +0200 Subject: [PATCH 02/22] some cleanup --- bundle/internal/validation/required.go | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/bundle/internal/validation/required.go b/bundle/internal/validation/required.go index 8163432bcc..f8879347b8 100644 --- a/bundle/internal/validation/required.go +++ b/bundle/internal/validation/required.go @@ -18,16 +18,17 @@ import ( // PatternInfo represents validation requirements for a specific configuration pattern type PatternInfo struct { // The pattern for which the fields in Required are applicable. - // This is a string representation of [dyn.Parent]. + // This is a string representation of [dyn.Pattern]. Parent string // List of required fields that should be set for every path in the - // config tree that matches the pattern. This field be a string of the + // config tree that matches the pattern. This field will be a string of the // form `{field1, field2, ...}`. RequiredFields string } -// formatRequiredFields formats a list of field names into a Go slice literal string +// formatRequiredFields formats a list of field names into string of the form `{field1, field2, ...}` +// representing a Go slice literal. func formatRequiredFields(fields []string) string { if len(fields) == 0 { return "{}" @@ -50,7 +51,7 @@ func extractRequiredFields(typ reflect.Type) []PatternInfo { return true } - // Do not perform required validation on fields that are deprecated, internal, or readonly. + // Do not generate required validation code for fields that are internal or readonly. bundleTag := path.BundleTag() if bundleTag.Internal() || bundleTag.ReadOnly() { return false From f612f5a90e804b7a915022e14a1060b3d7f8949a Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 12 Jun 2025 11:11:21 +0200 Subject: [PATCH 03/22] some cleanup --- .../validation/generated/required_fields.go | 364 ------------------ bundle/internal/validation/required.go | 52 +-- 2 files changed, 30 insertions(+), 386 deletions(-) diff --git a/bundle/internal/validation/generated/required_fields.go b/bundle/internal/validation/generated/required_fields.go index a4ce44ab77..58f3dec056 100644 --- a/bundle/internal/validation/generated/required_fields.go +++ b/bundle/internal/validation/generated/required_fields.go @@ -9,191 +9,10 @@ import ( // RequiredFields maps [dyn.Pattern] to required fields they should have. 
var RequiredFields = map[string][]string{ - "artifacts.*.files[*]": {"source"}, "bundle": {"name"}, - "environments.*.artifacts.*.files[*]": {"source"}, - "environments.*.bundle": {"name"}, - "environments.*.permissions[*]": {"level"}, - "environments.*.resources.apps.*": {"source_code_path", "name"}, - "environments.*.resources.apps.*.permissions[*]": {"level"}, - "environments.*.resources.apps.*.resources[*]": {"name"}, - "environments.*.resources.apps.*.resources[*].job": {"id", "permission"}, - "environments.*.resources.apps.*.resources[*].secret": {"key", "permission", "scope"}, - "environments.*.resources.apps.*.resources[*].serving_endpoint": {"name", "permission"}, - "environments.*.resources.apps.*.resources[*].sql_warehouse": {"id", "permission"}, - "environments.*.resources.apps.*.resources[*].uc_securable": {"permission", "securable_full_name", "securable_type"}, - "environments.*.resources.clusters.*.cluster_log_conf.dbfs": {"destination"}, - "environments.*.resources.clusters.*.cluster_log_conf.s3": {"destination"}, - "environments.*.resources.clusters.*.cluster_log_conf.volumes": {"destination"}, - "environments.*.resources.clusters.*.init_scripts[*].abfss": {"destination"}, - "environments.*.resources.clusters.*.init_scripts[*].dbfs": {"destination"}, - "environments.*.resources.clusters.*.init_scripts[*].file": {"destination"}, - "environments.*.resources.clusters.*.init_scripts[*].gcs": {"destination"}, - "environments.*.resources.clusters.*.init_scripts[*].s3": {"destination"}, - "environments.*.resources.clusters.*.init_scripts[*].volumes": {"destination"}, - "environments.*.resources.clusters.*.init_scripts[*].workspace": {"destination"}, - "environments.*.resources.clusters.*.permissions[*]": {"level"}, - "environments.*.resources.clusters.*.workload_type": {"clients"}, - "environments.*.resources.dashboards.*.permissions[*]": {"level"}, - "environments.*.resources.experiments.*.permissions[*]": {"level"}, - "environments.*.resources.jobs.*.deployment": {"kind"}, - "environments.*.resources.jobs.*.environments[*]": {"environment_key"}, - "environments.*.resources.jobs.*.environments[*].spec": {"client"}, - "environments.*.resources.jobs.*.git_source": {"git_provider", "git_url"}, - "environments.*.resources.jobs.*.git_source.job_source": {"import_from_git_branch", "job_config_path"}, - "environments.*.resources.jobs.*.health.rules[*]": {"metric", "op", "value"}, - "environments.*.resources.jobs.*.job_clusters[*]": {"job_cluster_key", "new_cluster"}, - "environments.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.dbfs": {"destination"}, - "environments.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.s3": {"destination"}, - "environments.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.volumes": {"destination"}, - "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].abfss": {"destination"}, - "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].dbfs": {"destination"}, - "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].file": {"destination"}, - "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].gcs": {"destination"}, - "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].s3": {"destination"}, - "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].volumes": {"destination"}, - "environments.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].workspace": 
{"destination"}, - "environments.*.resources.jobs.*.job_clusters[*].new_cluster.workload_type": {"clients"}, - "environments.*.resources.jobs.*.parameters[*]": {"default", "name"}, - "environments.*.resources.jobs.*.permissions[*]": {"level"}, - "environments.*.resources.jobs.*.queue": {"enabled"}, - "environments.*.resources.jobs.*.schedule": {"quartz_cron_expression", "timezone_id"}, - "environments.*.resources.jobs.*.tasks[*]": {"task_key"}, - "environments.*.resources.jobs.*.tasks[*].clean_rooms_notebook_task": {"clean_room_name", "notebook_name"}, - "environments.*.resources.jobs.*.tasks[*].condition_task": {"left", "op", "right"}, - "environments.*.resources.jobs.*.tasks[*].dbt_task": {"commands"}, - "environments.*.resources.jobs.*.tasks[*].depends_on[*]": {"task_key"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task": {"inputs", "task"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task": {"task_key"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.clean_rooms_notebook_task": {"clean_room_name", "notebook_name"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.condition_task": {"left", "op", "right"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.dbt_task": {"commands"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.depends_on[*]": {"task_key"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.for_each_task": {"inputs", "task"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task": {"dl_runtime_image"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task.compute": {"num_gpus"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.health.rules[*]": {"metric", "op", "value"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].cran": {"package"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].maven": {"coordinates"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].pypi": {"package"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.dbfs": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.s3": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.volumes": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].abfss": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].dbfs": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].file": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].gcs": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].s3": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].volumes": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].workspace": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.workload_type": {"clients"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.notebook_task": {"notebook_path"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.pipeline_task": {"pipeline_id"}, - 
"environments.*.resources.jobs.*.tasks[*].for_each_task.task.python_wheel_task": {"entry_point", "package_name"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.run_job_task": {"job_id"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.spark_python_task": {"python_file"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task": {"warehouse_id"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.alert": {"alert_id"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.dashboard": {"dashboard_id"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.file": {"path"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.query": {"query_id"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_failure[*]": {"id"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_start[*]": {"id"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, - "environments.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_success[*]": {"id"}, - "environments.*.resources.jobs.*.tasks[*].gen_ai_compute_task": {"dl_runtime_image"}, - "environments.*.resources.jobs.*.tasks[*].gen_ai_compute_task.compute": {"num_gpus"}, - "environments.*.resources.jobs.*.tasks[*].health.rules[*]": {"metric", "op", "value"}, - "environments.*.resources.jobs.*.tasks[*].libraries[*].cran": {"package"}, - "environments.*.resources.jobs.*.tasks[*].libraries[*].maven": {"coordinates"}, - "environments.*.resources.jobs.*.tasks[*].libraries[*].pypi": {"package"}, - "environments.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.dbfs": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.s3": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.volumes": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].abfss": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].dbfs": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].file": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].gcs": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].s3": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].volumes": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].workspace": {"destination"}, - "environments.*.resources.jobs.*.tasks[*].new_cluster.workload_type": {"clients"}, - "environments.*.resources.jobs.*.tasks[*].notebook_task": {"notebook_path"}, - "environments.*.resources.jobs.*.tasks[*].pipeline_task": {"pipeline_id"}, - "environments.*.resources.jobs.*.tasks[*].python_wheel_task": {"entry_point", "package_name"}, - "environments.*.resources.jobs.*.tasks[*].run_job_task": {"job_id"}, - "environments.*.resources.jobs.*.tasks[*].spark_python_task": {"python_file"}, - "environments.*.resources.jobs.*.tasks[*].sql_task": {"warehouse_id"}, - "environments.*.resources.jobs.*.tasks[*].sql_task.alert": {"alert_id"}, - "environments.*.resources.jobs.*.tasks[*].sql_task.dashboard": 
{"dashboard_id"}, - "environments.*.resources.jobs.*.tasks[*].sql_task.file": {"path"}, - "environments.*.resources.jobs.*.tasks[*].sql_task.query": {"query_id"}, - "environments.*.resources.jobs.*.tasks[*].webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, - "environments.*.resources.jobs.*.tasks[*].webhook_notifications.on_failure[*]": {"id"}, - "environments.*.resources.jobs.*.tasks[*].webhook_notifications.on_start[*]": {"id"}, - "environments.*.resources.jobs.*.tasks[*].webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, - "environments.*.resources.jobs.*.tasks[*].webhook_notifications.on_success[*]": {"id"}, - "environments.*.resources.jobs.*.trigger.file_arrival": {"url"}, - "environments.*.resources.jobs.*.trigger.periodic": {"interval", "unit"}, - "environments.*.resources.jobs.*.webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, - "environments.*.resources.jobs.*.webhook_notifications.on_failure[*]": {"id"}, - "environments.*.resources.jobs.*.webhook_notifications.on_start[*]": {"id"}, - "environments.*.resources.jobs.*.webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, - "environments.*.resources.jobs.*.webhook_notifications.on_success[*]": {"id"}, - "environments.*.resources.model_serving_endpoints.*": {"name"}, - "environments.*.resources.model_serving_endpoints.*.ai_gateway.fallback_config": {"enabled"}, - "environments.*.resources.model_serving_endpoints.*.ai_gateway.rate_limits[*]": {"calls", "renewal_period"}, - "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model": {"name", "provider", "task"}, - "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.amazon_bedrock_config": {"aws_region", "bedrock_provider"}, - "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.custom_provider_config": {"custom_provider_url"}, - "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.custom_provider_config.api_key_auth": {"key"}, - "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.databricks_model_serving_config": {"databricks_workspace_url"}, - "environments.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.google_cloud_vertex_ai_config": {"project_id", "region"}, - "environments.*.resources.model_serving_endpoints.*.config.served_models[*]": {"model_name", "model_version", "scale_to_zero_enabled"}, - "environments.*.resources.model_serving_endpoints.*.config.traffic_config.routes[*]": {"served_model_name", "traffic_percentage"}, - "environments.*.resources.model_serving_endpoints.*.permissions[*]": {"level"}, - "environments.*.resources.model_serving_endpoints.*.rate_limits[*]": {"calls", "renewal_period"}, - "environments.*.resources.model_serving_endpoints.*.tags[*]": {"key"}, - "environments.*.resources.models.*": {"name"}, - "environments.*.resources.models.*.permissions[*]": {"level"}, - "environments.*.resources.pipelines.*.clusters[*].autoscale": {"max_workers", "min_workers"}, - "environments.*.resources.pipelines.*.clusters[*].cluster_log_conf.dbfs": {"destination"}, - "environments.*.resources.pipelines.*.clusters[*].cluster_log_conf.s3": {"destination"}, - "environments.*.resources.pipelines.*.clusters[*].cluster_log_conf.volumes": {"destination"}, - "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].abfss": {"destination"}, - 
"environments.*.resources.pipelines.*.clusters[*].init_scripts[*].dbfs": {"destination"}, - "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].file": {"destination"}, - "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].gcs": {"destination"}, - "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].s3": {"destination"}, - "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].volumes": {"destination"}, - "environments.*.resources.pipelines.*.clusters[*].init_scripts[*].workspace": {"destination"}, - "environments.*.resources.pipelines.*.deployment": {"kind"}, - "environments.*.resources.pipelines.*.gateway_definition": {"connection_name", "gateway_storage_catalog", "gateway_storage_schema"}, - "environments.*.resources.pipelines.*.ingestion_definition.objects[*].report": {"destination_catalog", "destination_schema", "source_url"}, - "environments.*.resources.pipelines.*.ingestion_definition.objects[*].schema": {"destination_catalog", "destination_schema", "source_schema"}, - "environments.*.resources.pipelines.*.ingestion_definition.objects[*].table": {"destination_catalog", "destination_schema", "source_table"}, - "environments.*.resources.pipelines.*.libraries[*].maven": {"coordinates"}, - "environments.*.resources.pipelines.*.permissions[*]": {"level"}, - "environments.*.resources.pipelines.*.restart_window": {"start_hour"}, - "environments.*.resources.quality_monitors.*": {"table_name", "assets_dir", "output_schema_name"}, - "environments.*.resources.quality_monitors.*.custom_metrics[*]": {"definition", "input_columns", "name", "output_data_type", "type"}, - "environments.*.resources.quality_monitors.*.inference_log": {"granularities", "model_id_col", "prediction_col", "problem_type", "timestamp_col"}, - "environments.*.resources.quality_monitors.*.schedule": {"quartz_cron_expression", "timezone_id"}, - "environments.*.resources.quality_monitors.*.time_series": {"granularities", "timestamp_col"}, - "environments.*.resources.registered_models.*": {"catalog_name", "name", "schema_name"}, - "environments.*.resources.registered_models.*.grants[*]": {"privileges", "principal"}, - "environments.*.resources.schemas.*": {"catalog_name", "name"}, - "environments.*.resources.schemas.*.grants[*]": {"privileges", "principal"}, - "environments.*.resources.secret_scopes.*": {"name"}, - "environments.*.resources.secret_scopes.*.keyvault_metadata": {"dns_name", "resource_id"}, - "environments.*.resources.secret_scopes.*.permissions[*]": {"level"}, - "environments.*.resources.volumes.*": {"catalog_name", "name", "schema_name", "volume_type"}, - "environments.*.resources.volumes.*.grants[*]": {"privileges", "principal"}, - "permissions[*]": {"level"}, "resources.apps.*": {"source_code_path", "name"}, @@ -384,187 +203,4 @@ var RequiredFields = map[string][]string{ "resources.volumes.*": {"catalog_name", "name", "schema_name", "volume_type"}, "resources.volumes.*.grants[*]": {"privileges", "principal"}, - - "targets.*.artifacts.*.files[*]": {"source"}, - - "targets.*.bundle": {"name"}, - - "targets.*.permissions[*]": {"level"}, - - "targets.*.resources.apps.*": {"source_code_path", "name"}, - "targets.*.resources.apps.*.permissions[*]": {"level"}, - "targets.*.resources.apps.*.resources[*]": {"name"}, - "targets.*.resources.apps.*.resources[*].job": {"id", "permission"}, - "targets.*.resources.apps.*.resources[*].secret": {"key", "permission", "scope"}, - "targets.*.resources.apps.*.resources[*].serving_endpoint": {"name", "permission"}, - 
"targets.*.resources.apps.*.resources[*].sql_warehouse": {"id", "permission"}, - "targets.*.resources.apps.*.resources[*].uc_securable": {"permission", "securable_full_name", "securable_type"}, - "targets.*.resources.clusters.*.cluster_log_conf.dbfs": {"destination"}, - "targets.*.resources.clusters.*.cluster_log_conf.s3": {"destination"}, - "targets.*.resources.clusters.*.cluster_log_conf.volumes": {"destination"}, - "targets.*.resources.clusters.*.init_scripts[*].abfss": {"destination"}, - "targets.*.resources.clusters.*.init_scripts[*].dbfs": {"destination"}, - "targets.*.resources.clusters.*.init_scripts[*].file": {"destination"}, - "targets.*.resources.clusters.*.init_scripts[*].gcs": {"destination"}, - "targets.*.resources.clusters.*.init_scripts[*].s3": {"destination"}, - "targets.*.resources.clusters.*.init_scripts[*].volumes": {"destination"}, - "targets.*.resources.clusters.*.init_scripts[*].workspace": {"destination"}, - "targets.*.resources.clusters.*.permissions[*]": {"level"}, - "targets.*.resources.clusters.*.workload_type": {"clients"}, - "targets.*.resources.dashboards.*.permissions[*]": {"level"}, - "targets.*.resources.experiments.*.permissions[*]": {"level"}, - "targets.*.resources.jobs.*.deployment": {"kind"}, - "targets.*.resources.jobs.*.environments[*]": {"environment_key"}, - "targets.*.resources.jobs.*.environments[*].spec": {"client"}, - "targets.*.resources.jobs.*.git_source": {"git_provider", "git_url"}, - "targets.*.resources.jobs.*.git_source.job_source": {"import_from_git_branch", "job_config_path"}, - "targets.*.resources.jobs.*.health.rules[*]": {"metric", "op", "value"}, - "targets.*.resources.jobs.*.job_clusters[*]": {"job_cluster_key", "new_cluster"}, - "targets.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.dbfs": {"destination"}, - "targets.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.s3": {"destination"}, - "targets.*.resources.jobs.*.job_clusters[*].new_cluster.cluster_log_conf.volumes": {"destination"}, - "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].abfss": {"destination"}, - "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].dbfs": {"destination"}, - "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].file": {"destination"}, - "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].gcs": {"destination"}, - "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].s3": {"destination"}, - "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].volumes": {"destination"}, - "targets.*.resources.jobs.*.job_clusters[*].new_cluster.init_scripts[*].workspace": {"destination"}, - "targets.*.resources.jobs.*.job_clusters[*].new_cluster.workload_type": {"clients"}, - "targets.*.resources.jobs.*.parameters[*]": {"default", "name"}, - "targets.*.resources.jobs.*.permissions[*]": {"level"}, - "targets.*.resources.jobs.*.queue": {"enabled"}, - "targets.*.resources.jobs.*.schedule": {"quartz_cron_expression", "timezone_id"}, - "targets.*.resources.jobs.*.tasks[*]": {"task_key"}, - "targets.*.resources.jobs.*.tasks[*].clean_rooms_notebook_task": {"clean_room_name", "notebook_name"}, - "targets.*.resources.jobs.*.tasks[*].condition_task": {"left", "op", "right"}, - "targets.*.resources.jobs.*.tasks[*].dbt_task": {"commands"}, - "targets.*.resources.jobs.*.tasks[*].depends_on[*]": {"task_key"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task": {"inputs", "task"}, - 
"targets.*.resources.jobs.*.tasks[*].for_each_task.task": {"task_key"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.clean_rooms_notebook_task": {"clean_room_name", "notebook_name"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.condition_task": {"left", "op", "right"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.dbt_task": {"commands"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.depends_on[*]": {"task_key"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.for_each_task": {"inputs", "task"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task": {"dl_runtime_image"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task.compute": {"num_gpus"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.health.rules[*]": {"metric", "op", "value"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].cran": {"package"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].maven": {"coordinates"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.libraries[*].pypi": {"package"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.dbfs": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.s3": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.cluster_log_conf.volumes": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].abfss": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].dbfs": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].file": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].gcs": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].s3": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].volumes": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.init_scripts[*].workspace": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.new_cluster.workload_type": {"clients"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.notebook_task": {"notebook_path"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.pipeline_task": {"pipeline_id"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.python_wheel_task": {"entry_point", "package_name"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.run_job_task": {"job_id"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.spark_python_task": {"python_file"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task": {"warehouse_id"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.alert": {"alert_id"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.dashboard": {"dashboard_id"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.file": {"path"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.sql_task.query": {"query_id"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_failure[*]": {"id"}, - 
"targets.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_start[*]": {"id"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, - "targets.*.resources.jobs.*.tasks[*].for_each_task.task.webhook_notifications.on_success[*]": {"id"}, - "targets.*.resources.jobs.*.tasks[*].gen_ai_compute_task": {"dl_runtime_image"}, - "targets.*.resources.jobs.*.tasks[*].gen_ai_compute_task.compute": {"num_gpus"}, - "targets.*.resources.jobs.*.tasks[*].health.rules[*]": {"metric", "op", "value"}, - "targets.*.resources.jobs.*.tasks[*].libraries[*].cran": {"package"}, - "targets.*.resources.jobs.*.tasks[*].libraries[*].maven": {"coordinates"}, - "targets.*.resources.jobs.*.tasks[*].libraries[*].pypi": {"package"}, - "targets.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.dbfs": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.s3": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].new_cluster.cluster_log_conf.volumes": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].abfss": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].dbfs": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].file": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].gcs": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].s3": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].volumes": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].new_cluster.init_scripts[*].workspace": {"destination"}, - "targets.*.resources.jobs.*.tasks[*].new_cluster.workload_type": {"clients"}, - "targets.*.resources.jobs.*.tasks[*].notebook_task": {"notebook_path"}, - "targets.*.resources.jobs.*.tasks[*].pipeline_task": {"pipeline_id"}, - "targets.*.resources.jobs.*.tasks[*].python_wheel_task": {"entry_point", "package_name"}, - "targets.*.resources.jobs.*.tasks[*].run_job_task": {"job_id"}, - "targets.*.resources.jobs.*.tasks[*].spark_python_task": {"python_file"}, - "targets.*.resources.jobs.*.tasks[*].sql_task": {"warehouse_id"}, - "targets.*.resources.jobs.*.tasks[*].sql_task.alert": {"alert_id"}, - "targets.*.resources.jobs.*.tasks[*].sql_task.dashboard": {"dashboard_id"}, - "targets.*.resources.jobs.*.tasks[*].sql_task.file": {"path"}, - "targets.*.resources.jobs.*.tasks[*].sql_task.query": {"query_id"}, - "targets.*.resources.jobs.*.tasks[*].webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, - "targets.*.resources.jobs.*.tasks[*].webhook_notifications.on_failure[*]": {"id"}, - "targets.*.resources.jobs.*.tasks[*].webhook_notifications.on_start[*]": {"id"}, - "targets.*.resources.jobs.*.tasks[*].webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, - "targets.*.resources.jobs.*.tasks[*].webhook_notifications.on_success[*]": {"id"}, - "targets.*.resources.jobs.*.trigger.file_arrival": {"url"}, - "targets.*.resources.jobs.*.trigger.periodic": {"interval", "unit"}, - "targets.*.resources.jobs.*.webhook_notifications.on_duration_warning_threshold_exceeded[*]": {"id"}, - "targets.*.resources.jobs.*.webhook_notifications.on_failure[*]": {"id"}, - "targets.*.resources.jobs.*.webhook_notifications.on_start[*]": {"id"}, - "targets.*.resources.jobs.*.webhook_notifications.on_streaming_backlog_exceeded[*]": {"id"}, - "targets.*.resources.jobs.*.webhook_notifications.on_success[*]": 
{"id"}, - "targets.*.resources.model_serving_endpoints.*": {"name"}, - "targets.*.resources.model_serving_endpoints.*.ai_gateway.fallback_config": {"enabled"}, - "targets.*.resources.model_serving_endpoints.*.ai_gateway.rate_limits[*]": {"calls", "renewal_period"}, - "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model": {"name", "provider", "task"}, - "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.amazon_bedrock_config": {"aws_region", "bedrock_provider"}, - "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.custom_provider_config": {"custom_provider_url"}, - "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.custom_provider_config.api_key_auth": {"key"}, - "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.databricks_model_serving_config": {"databricks_workspace_url"}, - "targets.*.resources.model_serving_endpoints.*.config.served_entities[*].external_model.google_cloud_vertex_ai_config": {"project_id", "region"}, - "targets.*.resources.model_serving_endpoints.*.config.served_models[*]": {"model_name", "model_version", "scale_to_zero_enabled"}, - "targets.*.resources.model_serving_endpoints.*.config.traffic_config.routes[*]": {"served_model_name", "traffic_percentage"}, - "targets.*.resources.model_serving_endpoints.*.permissions[*]": {"level"}, - "targets.*.resources.model_serving_endpoints.*.rate_limits[*]": {"calls", "renewal_period"}, - "targets.*.resources.model_serving_endpoints.*.tags[*]": {"key"}, - "targets.*.resources.models.*": {"name"}, - "targets.*.resources.models.*.permissions[*]": {"level"}, - "targets.*.resources.pipelines.*.clusters[*].autoscale": {"max_workers", "min_workers"}, - "targets.*.resources.pipelines.*.clusters[*].cluster_log_conf.dbfs": {"destination"}, - "targets.*.resources.pipelines.*.clusters[*].cluster_log_conf.s3": {"destination"}, - "targets.*.resources.pipelines.*.clusters[*].cluster_log_conf.volumes": {"destination"}, - "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].abfss": {"destination"}, - "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].dbfs": {"destination"}, - "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].file": {"destination"}, - "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].gcs": {"destination"}, - "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].s3": {"destination"}, - "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].volumes": {"destination"}, - "targets.*.resources.pipelines.*.clusters[*].init_scripts[*].workspace": {"destination"}, - "targets.*.resources.pipelines.*.deployment": {"kind"}, - "targets.*.resources.pipelines.*.gateway_definition": {"connection_name", "gateway_storage_catalog", "gateway_storage_schema"}, - "targets.*.resources.pipelines.*.ingestion_definition.objects[*].report": {"destination_catalog", "destination_schema", "source_url"}, - "targets.*.resources.pipelines.*.ingestion_definition.objects[*].schema": {"destination_catalog", "destination_schema", "source_schema"}, - "targets.*.resources.pipelines.*.ingestion_definition.objects[*].table": {"destination_catalog", "destination_schema", "source_table"}, - "targets.*.resources.pipelines.*.libraries[*].maven": {"coordinates"}, - "targets.*.resources.pipelines.*.permissions[*]": {"level"}, - "targets.*.resources.pipelines.*.restart_window": {"start_hour"}, - "targets.*.resources.quality_monitors.*": 
{"table_name", "assets_dir", "output_schema_name"}, - "targets.*.resources.quality_monitors.*.custom_metrics[*]": {"definition", "input_columns", "name", "output_data_type", "type"}, - "targets.*.resources.quality_monitors.*.inference_log": {"granularities", "model_id_col", "prediction_col", "problem_type", "timestamp_col"}, - "targets.*.resources.quality_monitors.*.schedule": {"quartz_cron_expression", "timezone_id"}, - "targets.*.resources.quality_monitors.*.time_series": {"granularities", "timestamp_col"}, - "targets.*.resources.registered_models.*": {"catalog_name", "name", "schema_name"}, - "targets.*.resources.registered_models.*.grants[*]": {"privileges", "principal"}, - "targets.*.resources.schemas.*": {"catalog_name", "name"}, - "targets.*.resources.schemas.*.grants[*]": {"privileges", "principal"}, - "targets.*.resources.secret_scopes.*": {"name"}, - "targets.*.resources.secret_scopes.*.keyvault_metadata": {"dns_name", "resource_id"}, - "targets.*.resources.secret_scopes.*.permissions[*]": {"level"}, - "targets.*.resources.volumes.*": {"catalog_name", "name", "schema_name", "volume_type"}, - "targets.*.resources.volumes.*.grants[*]": {"privileges", "principal"}, } diff --git a/bundle/internal/validation/required.go b/bundle/internal/validation/required.go index f8879347b8..f5acb2b5e4 100644 --- a/bundle/internal/validation/required.go +++ b/bundle/internal/validation/required.go @@ -15,7 +15,6 @@ import ( "github.com/databricks/cli/libs/structwalk" ) -// PatternInfo represents validation requirements for a specific configuration pattern type PatternInfo struct { // The pattern for which the fields in Required are applicable. // This is a string representation of [dyn.Pattern]. @@ -43,10 +42,10 @@ func formatRequiredFields(fields []string) string { } // extractRequiredFields walks through a struct type and extracts required field patterns -func extractRequiredFields(typ reflect.Type) []PatternInfo { +func extractRequiredFields(typ reflect.Type) ([]PatternInfo, error) { fieldsByPattern := make(map[string][]string) - structwalk.WalkType(typ, func(path *structpath.PathNode, _ reflect.Type) bool { + err := structwalk.WalkType(typ, func(path *structpath.PathNode, _ reflect.Type) bool { if path == nil { return true } @@ -62,7 +61,6 @@ func extractRequiredFields(typ reflect.Type) []PatternInfo { return true } - // Only perform required validation for struct fields. field, ok := path.Field() if !ok { return true @@ -73,7 +71,7 @@ func extractRequiredFields(typ reflect.Type) []PatternInfo { return true }) - return buildPatternInfos(fieldsByPattern) + return buildPatternInfos(fieldsByPattern), err } // buildPatternInfos converts the field map to PatternInfo slice @@ -94,17 +92,9 @@ func buildPatternInfos(fieldsByPattern map[string][]string) []PatternInfo { func getGroupingKey(parentPath string) string { parts := strings.Split(parentPath, ".") - switch parts[0] { - case "resources": - // Group resources by their type (e.g., "resources.jobs") - if len(parts) > 1 { - return parts[0] + "." + parts[1] - } - case "targets": - // Group target overrides by their first 3 keys - if len(parts) > 2 { - return strings.Join(parts[:3], ".") - } + // Group resources by their resource type (e.g., "resources.jobs") + if parts[0] == "resources" && len(parts) > 1 { + return parts[0] + "." 
+ parts[1] } // Use the top level key for other fields @@ -123,6 +113,17 @@ func groupPatternsByKey(patterns []PatternInfo) map[string][]PatternInfo { return groupedPatterns } +func filterTargetsAndEnvironments(patterns map[string][]PatternInfo) map[string][]PatternInfo { + filtered := make(map[string][]PatternInfo) + for key, patterns := range patterns { + if key == "targets" || key == "environments" { + continue + } + filtered[key] = patterns + } + return filtered +} + // sortGroupedPatterns sorts patterns within each group and returns them as a sorted slice func sortGroupedPatterns(groupedPatterns map[string][]PatternInfo) [][]PatternInfo { // Get sorted group keys @@ -149,15 +150,22 @@ func sortGroupedPatterns(groupedPatterns map[string][]PatternInfo) [][]PatternIn } // RequiredFields returns grouped required field patterns for validation -func requiredFields() [][]PatternInfo { - patterns := extractRequiredFields(reflect.TypeOf(config.Root{})) +func requiredFields() ([][]PatternInfo, error) { + patterns, err := extractRequiredFields(reflect.TypeOf(config.Root{})) + if err != nil { + return nil, err + } groupedPatterns := groupPatternsByKey(patterns) - return sortGroupedPatterns(groupedPatterns) + filteredPatterns := filterTargetsAndEnvironments(groupedPatterns) + return sortGroupedPatterns(filteredPatterns), nil } // Generate creates a Go source file with required field validation rules func generateRequiredFields(outPath string) error { - requiredFields := requiredFields() + requiredFields, err := requiredFields() + if err != nil { + return fmt.Errorf("failed to generate required fields: %w", err) + } // Ensure output directory exists if err := os.MkdirAll(outPath, 0o755); err != nil { @@ -197,9 +205,9 @@ import ( // RequiredFields maps [dyn.Pattern] to required fields they should have. var RequiredFields = map[string][]string{ {{- range . }} -{{ range . }} +{{- range . }} "{{ .Parent }}": {{ .RequiredFields }}, {{- end }} -{{- end }} +{{ end -}} } ` From 9199d25f4cc819ff3b99de876b4047b968aaae1f Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 12 Jun 2025 13:21:33 +0200 Subject: [PATCH 04/22] add generate --- .codegen.json | 4 +++- Makefile | 5 ++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.codegen.json b/.codegen.json index 54c108218f..616909c0f7 100644 --- a/.codegen.json +++ b/.codegen.json @@ -23,9 +23,11 @@ "post_generate": [ "go test -timeout 240s -run TestConsistentDatabricksSdkVersion github.com/databricks/cli/internal/build", "make schema", + "make generate-validation", "echo 'bundle/internal/tf/schema/\\*.go linguist-generated=true' >> ./.gitattributes", "echo 'go.sum linguist-generated=true' >> ./.gitattributes", - "echo 'bundle/schema/jsonschema.json linguist-generated=true' >> ./.gitattributes" + "echo 'bundle/schema/jsonschema.json linguist-generated=true' >> ./.gitattributes", + "echo 'bundle/internal/validation/generated/required_fields.go linguist-generated=true' >> ./.gitattributes" ] } } diff --git a/Makefile b/Makefile index 137f9f86b1..b7084eebf9 100644 --- a/Makefile +++ b/Makefile @@ -82,6 +82,9 @@ integration: integration-short: VERBOSE_TEST=1 $(INTEGRATION) -short +generate-validation: + go run ./bundle/internal/validation/main.go + generate: genkit update-sdk [ ! -f tagging.py ] || mv tagging.py internal/genkit/tagging.py @@ -91,4 +94,4 @@ generate: [ ! 
-f .github/workflows/next-changelog.yml ] || rm .github/workflows/next-changelog.yml pushd experimental/python && make codegen -.PHONY: lint lintfull tidy lintcheck fmt fmtfull test cover showcover build snapshot schema integration integration-short acc-cover acc-showcover docs ws links checks +.PHONY: lint lintfull tidy lintcheck fmt fmtfull test cover showcover build snapshot schema integration integration-short acc-cover acc-showcover docs ws links checks generate-validation From de8477e830cd785f2c5b3aa75aea19f8ab5961d2 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 12 Jun 2025 13:36:07 +0200 Subject: [PATCH 05/22] fix and generate --- .gitattributes | 1 + Makefile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitattributes b/.gitattributes index 629be14230..3c7e54d7f6 100755 --- a/.gitattributes +++ b/.gitattributes @@ -149,3 +149,4 @@ cmd/workspace/workspace/workspace.go linguist-generated=true bundle/internal/tf/schema/\*.go linguist-generated=true go.sum linguist-generated=true bundle/schema/jsonschema.json linguist-generated=true +bundle/internal/validation/generated/required_fields.go linguist-generated=true diff --git a/Makefile b/Makefile index b7084eebf9..d120b4144e 100644 --- a/Makefile +++ b/Makefile @@ -83,7 +83,7 @@ integration-short: VERBOSE_TEST=1 $(INTEGRATION) -short generate-validation: - go run ./bundle/internal/validation/main.go + go run ./bundle/internal/validation/. generate: genkit update-sdk From 427ae3bec7270b7f4a7e212c720ac16ee9c0b278 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 12 Jun 2025 14:59:13 +0200 Subject: [PATCH 06/22] minor rename to type --- bundle/internal/validation/required.go | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/bundle/internal/validation/required.go b/bundle/internal/validation/required.go index f5acb2b5e4..e7fa4ae365 100644 --- a/bundle/internal/validation/required.go +++ b/bundle/internal/validation/required.go @@ -15,7 +15,7 @@ import ( "github.com/databricks/cli/libs/structwalk" ) -type PatternInfo struct { +type RequiredPatternInfo struct { // The pattern for which the fields in Required are applicable. // This is a string representation of [dyn.Pattern]. 
Parent string @@ -42,7 +42,7 @@ func formatRequiredFields(fields []string) string { } // extractRequiredFields walks through a struct type and extracts required field patterns -func extractRequiredFields(typ reflect.Type) ([]PatternInfo, error) { +func extractRequiredFields(typ reflect.Type) ([]RequiredPatternInfo, error) { fieldsByPattern := make(map[string][]string) err := structwalk.WalkType(typ, func(path *structpath.PathNode, _ reflect.Type) bool { @@ -75,11 +75,11 @@ func extractRequiredFields(typ reflect.Type) ([]PatternInfo, error) { } // buildPatternInfos converts the field map to PatternInfo slice -func buildPatternInfos(fieldsByPattern map[string][]string) []PatternInfo { - patterns := make([]PatternInfo, 0, len(fieldsByPattern)) +func buildPatternInfos(fieldsByPattern map[string][]string) []RequiredPatternInfo { + patterns := make([]RequiredPatternInfo, 0, len(fieldsByPattern)) for parentPath, fields := range fieldsByPattern { - patterns = append(patterns, PatternInfo{ + patterns = append(patterns, RequiredPatternInfo{ Parent: parentPath, RequiredFields: formatRequiredFields(fields), }) @@ -102,8 +102,8 @@ func getGroupingKey(parentPath string) string { } // groupPatternsByKey groups patterns by their logical grouping key -func groupPatternsByKey(patterns []PatternInfo) map[string][]PatternInfo { - groupedPatterns := make(map[string][]PatternInfo) +func groupPatternsByKey(patterns []RequiredPatternInfo) map[string][]RequiredPatternInfo { + groupedPatterns := make(map[string][]RequiredPatternInfo) for _, pattern := range patterns { key := getGroupingKey(pattern.Parent) @@ -113,8 +113,8 @@ func groupPatternsByKey(patterns []PatternInfo) map[string][]PatternInfo { return groupedPatterns } -func filterTargetsAndEnvironments(patterns map[string][]PatternInfo) map[string][]PatternInfo { - filtered := make(map[string][]PatternInfo) +func filterTargetsAndEnvironments(patterns map[string][]RequiredPatternInfo) map[string][]RequiredPatternInfo { + filtered := make(map[string][]RequiredPatternInfo) for key, patterns := range patterns { if key == "targets" || key == "environments" { continue @@ -125,7 +125,7 @@ func filterTargetsAndEnvironments(patterns map[string][]PatternInfo) map[string] } // sortGroupedPatterns sorts patterns within each group and returns them as a sorted slice -func sortGroupedPatterns(groupedPatterns map[string][]PatternInfo) [][]PatternInfo { +func sortGroupedPatterns(groupedPatterns map[string][]RequiredPatternInfo) [][]RequiredPatternInfo { // Get sorted group keys groupKeys := make([]string, 0, len(groupedPatterns)) for key := range groupedPatterns { @@ -134,7 +134,7 @@ func sortGroupedPatterns(groupedPatterns map[string][]PatternInfo) [][]PatternIn sort.Strings(groupKeys) // Build sorted result - result := make([][]PatternInfo, 0, len(groupKeys)) + result := make([][]RequiredPatternInfo, 0, len(groupKeys)) for _, key := range groupKeys { patterns := groupedPatterns[key] @@ -150,7 +150,7 @@ func sortGroupedPatterns(groupedPatterns map[string][]PatternInfo) [][]PatternIn } // RequiredFields returns grouped required field patterns for validation -func requiredFields() ([][]PatternInfo, error) { +func requiredFields() ([][]RequiredPatternInfo, error) { patterns, err := extractRequiredFields(reflect.TypeOf(config.Root{})) if err != nil { return nil, err From 2a0e91a757ba5921481283bed982a9afaec8756b Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 12 Jun 2025 15:36:45 +0200 Subject: [PATCH 07/22] Generate enum values for bundle fields --- 
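Notes: the new generator below discovers enum fields by reflection. It keeps any field whose pointer type exposes a Values() []string method, which it probes via reflect.PointerTo(typ). A minimal illustration of the shape checkValuesMethodSignature accepts (illustrative type, not taken from the SDK):

	type ebsVolumeType string

	// Pointer receiver with signature func() []string, which is exactly
	// what the generator checks for before calling it to collect values.
	func (e *ebsVolumeType) Values() []string {
		return []string{"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"}
	}
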
bundle/config/variable/resolve_metastore.go | 7 +- bundle/internal/validation/enum.go | 253 ++++++++++++++++++ .../validation/generated/enum_fields.go | 126 +++++++++ .../validation/generated/required_fields.go | 1 - bundle/internal/validation/main.go | 7 +- bundle/internal/validation/required.go | 13 +- go.mod | 52 ++-- go.sum | 61 +++++ 8 files changed, 481 insertions(+), 39 deletions(-) create mode 100644 bundle/internal/validation/enum.go create mode 100644 bundle/internal/validation/generated/enum_fields.go diff --git a/bundle/config/variable/resolve_metastore.go b/bundle/config/variable/resolve_metastore.go index 8a0a8c7edb..eb5109f9eb 100644 --- a/bundle/config/variable/resolve_metastore.go +++ b/bundle/config/variable/resolve_metastore.go @@ -11,11 +11,8 @@ type resolveMetastore struct { } func (l resolveMetastore) Resolve(ctx context.Context, w *databricks.WorkspaceClient) (string, error) { - entity, err := w.Metastores.GetByName(ctx, l.name) - if err != nil { - return "", err - } - return entity.MetastoreId, nil + // PLACEHOLDER, this will be fixed in the SDK bump. + return "", nil } func (l resolveMetastore) String() string { diff --git a/bundle/internal/validation/enum.go b/bundle/internal/validation/enum.go new file mode 100644 index 0000000000..7c88e5093c --- /dev/null +++ b/bundle/internal/validation/enum.go @@ -0,0 +1,253 @@ +package main + +import ( + "bytes" + "fmt" + "os" + "path/filepath" + "reflect" + "sort" + "strings" + "text/template" + + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/structdiff/structpath" + "github.com/databricks/cli/libs/structwalk" +) + +type EnumPatternInfo struct { + // The full pattern for which the enum values are applicable. + // This is a string representation of [dyn.Pattern]. + Pattern string + + // List of valid enum values for this pattern. This field will be a string of the + // form `{value1, value2, ...}`. 
+ EnumValues string +} + +// hasValuesMethod checks if the pointer to a type has a Values() method +func hasValuesMethod(typ reflect.Type) bool { + // Check if the pointer to the type has a Values() method + ptrType := reflect.PointerTo(typ) + return checkValuesMethodSignature(ptrType) +} + +// checkValuesMethodSignature verifies that a type has a Values() method with correct signature +func checkValuesMethodSignature(typ reflect.Type) bool { + method, exists := typ.MethodByName("Values") + if !exists { + return false + } + + // Verify the method signature: func() []string + methodType := method.Type + if methodType.NumIn() != 1 || methodType.NumOut() != 1 { + return false + } + + // Check return type is []string + returnType := methodType.Out(0) + if returnType.Kind() != reflect.Slice || returnType.Elem().Kind() != reflect.String { + return false + } + + return true +} + +// getEnumValues calls the Values() method on a pointer to the type to get valid enum values +func getEnumValues(typ reflect.Type) ([]string, error) { + // Create a pointer to zero value of the type and call Values() on it + zeroValue := reflect.Zero(typ) + ptrValue := reflect.New(typ) + ptrValue.Elem().Set(zeroValue) + method := ptrValue.MethodByName("Values") + + if !method.IsValid() { + return nil, fmt.Errorf("Values method not found on pointer to type %s", typ.Name()) + } + + result := method.Call(nil) + if len(result) != 1 { + return nil, fmt.Errorf("Values method should return exactly one value") + } + + enumSlice := result[0] + if enumSlice.Kind() != reflect.Slice { + return nil, fmt.Errorf("Values method should return a slice") + } + + values := make([]string, enumSlice.Len()) + for i := 0; i < enumSlice.Len(); i++ { + values[i] = enumSlice.Index(i).String() + } + + return values, nil +} + +// extractEnumFields walks through a struct type and extracts enum field patterns +func extractEnumFields(typ reflect.Type) ([]EnumPatternInfo, error) { + var patterns []EnumPatternInfo + + err := structwalk.WalkType(typ, func(path *structpath.PathNode, fieldType reflect.Type) bool { + if path == nil { + return true + } + + // Do not generate enum validation code for fields that are internal or readonly. + bundleTag := path.BundleTag() + if bundleTag.Internal() || bundleTag.ReadOnly() { + return false + } + + // Check if this type has a Values() method on its pointer + if !hasValuesMethod(fieldType) { + return true + } + + // Get the enum values + enumValues, err := getEnumValues(fieldType) + if err != nil { + // Skip if we can't get enum values + return true + } + + // Store the full pattern path (not parent path) + fullPattern := path.DynPath() + patterns = append(patterns, EnumPatternInfo{ + Pattern: fullPattern, + EnumValues: formatValues(enumValues), + }) + + return true + }) + if err != nil { + return nil, err + } + + return patterns, nil +} + +// getEnumGroupingKey determines the grouping key for organizing patterns +// TODO: Combine with the required function. +func getEnumGroupingKey(pattern string) string { + parts := strings.Split(pattern, ".") + + // Group resources by their resource type (e.g., "resources.jobs") + if parts[0] == "resources" && len(parts) > 1 { + return parts[0] + "." 
+ parts[1] + } + + // Use the top level key for other fields + return parts[0] +} + +// groupEnumPatternsByKey groups patterns by their logical grouping key +func groupEnumPatternsByKey(patterns []EnumPatternInfo) map[string][]EnumPatternInfo { + groupedPatterns := make(map[string][]EnumPatternInfo) + + for _, pattern := range patterns { + key := getEnumGroupingKey(pattern.Pattern) + groupedPatterns[key] = append(groupedPatterns[key], pattern) + } + + return groupedPatterns +} + +func filterEnumTargetsAndEnvironments(patterns map[string][]EnumPatternInfo) map[string][]EnumPatternInfo { + filtered := make(map[string][]EnumPatternInfo) + for key, patterns := range patterns { + if key == "targets" || key == "environments" { + continue + } + filtered[key] = patterns + } + return filtered +} + +// sortGroupedEnumPatterns sorts patterns within each group and returns them as a sorted slice +func sortGroupedEnumPatterns(groupedPatterns map[string][]EnumPatternInfo) [][]EnumPatternInfo { + // Get sorted group keys + groupKeys := make([]string, 0, len(groupedPatterns)) + for key := range groupedPatterns { + groupKeys = append(groupKeys, key) + } + sort.Strings(groupKeys) + + // Build sorted result + result := make([][]EnumPatternInfo, 0, len(groupKeys)) + for _, key := range groupKeys { + patterns := groupedPatterns[key] + + // Sort patterns within each group by pattern path + sort.Slice(patterns, func(i, j int) bool { + return patterns[i].Pattern < patterns[j].Pattern + }) + + result = append(result, patterns) + } + + return result +} + +// enumFields returns grouped enum field patterns for validation +func enumFields() ([][]EnumPatternInfo, error) { + patterns, err := extractEnumFields(reflect.TypeOf(config.Root{})) + if err != nil { + return nil, err + } + groupedPatterns := groupEnumPatternsByKey(patterns) + filteredPatterns := filterEnumTargetsAndEnvironments(groupedPatterns) + return sortGroupedEnumPatterns(filteredPatterns), nil +} + +// Generate creates a Go source file with enum field validation rules +func generateEnumFields(outPath string) error { + enumFields, err := enumFields() + if err != nil { + return fmt.Errorf("failed to generate enum fields: %w", err) + } + + // Ensure output directory exists + if err := os.MkdirAll(outPath, 0o755); err != nil { + return fmt.Errorf("failed to create output directory: %w", err) + } + + // Parse and execute template + tmpl, err := template.New("enum_validation").Parse(enumValidationTemplate) + if err != nil { + return fmt.Errorf("failed to parse template: %w", err) + } + + var generatedCode bytes.Buffer + if err := tmpl.Execute(&generatedCode, enumFields); err != nil { + return fmt.Errorf("failed to execute template: %w", err) + } + + // Write generated code to file + filePath := filepath.Join(outPath, "enum_fields.go") + if err := os.WriteFile(filePath, generatedCode.Bytes(), 0o644); err != nil { + return fmt.Errorf("failed to write generated code: %w", err) + } + + return nil +} + +// enumValidationTemplate is the Go text template for generating the enum validation map +const enumValidationTemplate = `package generated + +// THIS FILE IS AUTOGENERATED. +// DO NOT EDIT THIS FILE DIRECTLY. + +import ( + _ "github.com/databricks/cli/libs/dyn" +) + +// EnumFields maps [dyn.Pattern] to valid enum values they should have. +var EnumFields = map[string][]string{ +{{- range . }} +{{- range . 
}} + "{{ .Pattern }}": {{ .EnumValues }}, +{{- end }} +{{ end -}} +} +` diff --git a/bundle/internal/validation/generated/enum_fields.go b/bundle/internal/validation/generated/enum_fields.go new file mode 100644 index 0000000000..ec679c399a --- /dev/null +++ b/bundle/internal/validation/generated/enum_fields.go @@ -0,0 +1,126 @@ +package generated + +// THIS FILE IS AUTOGENERATED. +// DO NOT EDIT THIS FILE DIRECTLY. + +import ( + _ "github.com/databricks/cli/libs/dyn" +) + +// EnumFields maps [dyn.Pattern] to valid enum values they should have. +var EnumFields = map[string][]string{ + "resources.apps.*.active_deployment.mode": {"AUTO_SYNC", "SNAPSHOT"}, + "resources.apps.*.active_deployment.status.state": {"CANCELLED", "FAILED", "IN_PROGRESS", "SUCCEEDED"}, + "resources.apps.*.app_status.state": {"CRASHED", "DEPLOYING", "RUNNING", "UNAVAILABLE"}, + "resources.apps.*.compute_status.state": {"ACTIVE", "DELETING", "ERROR", "STARTING", "STOPPED", "STOPPING", "UPDATING"}, + "resources.apps.*.pending_deployment.mode": {"AUTO_SYNC", "SNAPSHOT"}, + "resources.apps.*.pending_deployment.status.state": {"CANCELLED", "FAILED", "IN_PROGRESS", "SUCCEEDED"}, + "resources.apps.*.resources[*].job.permission": {"CAN_MANAGE", "CAN_MANAGE_RUN", "CAN_VIEW", "IS_OWNER"}, + "resources.apps.*.resources[*].secret.permission": {"MANAGE", "READ", "WRITE"}, + "resources.apps.*.resources[*].serving_endpoint.permission": {"CAN_MANAGE", "CAN_QUERY", "CAN_VIEW"}, + "resources.apps.*.resources[*].sql_warehouse.permission": {"CAN_MANAGE", "CAN_USE", "IS_OWNER"}, + "resources.apps.*.resources[*].uc_securable.permission": {"READ_VOLUME", "WRITE_VOLUME"}, + "resources.apps.*.resources[*].uc_securable.securable_type": {"VOLUME"}, + + "resources.clusters.*.aws_attributes.availability": {"ON_DEMAND", "SPOT", "SPOT_WITH_FALLBACK"}, + "resources.clusters.*.aws_attributes.ebs_volume_type": {"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"}, + "resources.clusters.*.azure_attributes.availability": {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"}, + "resources.clusters.*.data_security_mode": {"DATA_SECURITY_MODE_AUTO", "DATA_SECURITY_MODE_DEDICATED", "DATA_SECURITY_MODE_STANDARD", "LEGACY_PASSTHROUGH", "LEGACY_SINGLE_USER", "LEGACY_SINGLE_USER_STANDARD", "LEGACY_TABLE_ACL", "NONE", "SINGLE_USER", "USER_ISOLATION"}, + "resources.clusters.*.gcp_attributes.availability": {"ON_DEMAND_GCP", "PREEMPTIBLE_GCP", "PREEMPTIBLE_WITH_FALLBACK_GCP"}, + "resources.clusters.*.kind": {"CLASSIC_PREVIEW"}, + "resources.clusters.*.runtime_engine": {"NULL", "PHOTON", "STANDARD"}, + + "resources.dashboards.*.lifecycle_state": {"ACTIVE", "TRASHED"}, + + "resources.jobs.*.continuous.pause_status": {"PAUSED", "UNPAUSED"}, + "resources.jobs.*.deployment.kind": {"BUNDLE"}, + "resources.jobs.*.edit_mode": {"EDITABLE", "UI_LOCKED"}, + "resources.jobs.*.format": {"MULTI_TASK", "SINGLE_TASK"}, + "resources.jobs.*.git_source.git_provider": {"awsCodeCommit", "azureDevOpsServices", "bitbucketCloud", "bitbucketServer", "gitHub", "gitHubEnterprise", "gitLab", "gitLabEnterpriseEdition"}, + "resources.jobs.*.git_source.job_source.dirty_state": {"DISCONNECTED", "NOT_SYNCED"}, + "resources.jobs.*.health.rules[*].metric": {"RUN_DURATION_SECONDS", "STREAMING_BACKLOG_BYTES", "STREAMING_BACKLOG_FILES", "STREAMING_BACKLOG_RECORDS", "STREAMING_BACKLOG_SECONDS"}, + "resources.jobs.*.health.rules[*].op": {"GREATER_THAN"}, + "resources.jobs.*.job_clusters[*].new_cluster.aws_attributes.availability": {"ON_DEMAND", "SPOT", "SPOT_WITH_FALLBACK"}, + 
"resources.jobs.*.job_clusters[*].new_cluster.aws_attributes.ebs_volume_type": {"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"}, + "resources.jobs.*.job_clusters[*].new_cluster.azure_attributes.availability": {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"}, + "resources.jobs.*.job_clusters[*].new_cluster.data_security_mode": {"DATA_SECURITY_MODE_AUTO", "DATA_SECURITY_MODE_DEDICATED", "DATA_SECURITY_MODE_STANDARD", "LEGACY_PASSTHROUGH", "LEGACY_SINGLE_USER", "LEGACY_SINGLE_USER_STANDARD", "LEGACY_TABLE_ACL", "NONE", "SINGLE_USER", "USER_ISOLATION"}, + "resources.jobs.*.job_clusters[*].new_cluster.gcp_attributes.availability": {"ON_DEMAND_GCP", "PREEMPTIBLE_GCP", "PREEMPTIBLE_WITH_FALLBACK_GCP"}, + "resources.jobs.*.job_clusters[*].new_cluster.kind": {"CLASSIC_PREVIEW"}, + "resources.jobs.*.job_clusters[*].new_cluster.runtime_engine": {"NULL", "PHOTON", "STANDARD"}, + "resources.jobs.*.performance_target": {"PERFORMANCE_OPTIMIZED", "STANDARD"}, + "resources.jobs.*.schedule.pause_status": {"PAUSED", "UNPAUSED"}, + "resources.jobs.*.tasks[*].condition_task.op": {"EQUAL_TO", "GREATER_THAN", "GREATER_THAN_OR_EQUAL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "NOT_EQUAL"}, + "resources.jobs.*.tasks[*].dbt_task.source": {"GIT", "WORKSPACE"}, + "resources.jobs.*.tasks[*].for_each_task.task.condition_task.op": {"EQUAL_TO", "GREATER_THAN", "GREATER_THAN_OR_EQUAL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "NOT_EQUAL"}, + "resources.jobs.*.tasks[*].for_each_task.task.dbt_task.source": {"GIT", "WORKSPACE"}, + "resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task.source": {"GIT", "WORKSPACE"}, + "resources.jobs.*.tasks[*].for_each_task.task.health.rules[*].metric": {"RUN_DURATION_SECONDS", "STREAMING_BACKLOG_BYTES", "STREAMING_BACKLOG_FILES", "STREAMING_BACKLOG_RECORDS", "STREAMING_BACKLOG_SECONDS"}, + "resources.jobs.*.tasks[*].for_each_task.task.health.rules[*].op": {"GREATER_THAN"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.aws_attributes.availability": {"ON_DEMAND", "SPOT", "SPOT_WITH_FALLBACK"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.aws_attributes.ebs_volume_type": {"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.azure_attributes.availability": {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.data_security_mode": {"DATA_SECURITY_MODE_AUTO", "DATA_SECURITY_MODE_DEDICATED", "DATA_SECURITY_MODE_STANDARD", "LEGACY_PASSTHROUGH", "LEGACY_SINGLE_USER", "LEGACY_SINGLE_USER_STANDARD", "LEGACY_TABLE_ACL", "NONE", "SINGLE_USER", "USER_ISOLATION"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.gcp_attributes.availability": {"ON_DEMAND_GCP", "PREEMPTIBLE_GCP", "PREEMPTIBLE_WITH_FALLBACK_GCP"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.kind": {"CLASSIC_PREVIEW"}, + "resources.jobs.*.tasks[*].for_each_task.task.new_cluster.runtime_engine": {"NULL", "PHOTON", "STANDARD"}, + "resources.jobs.*.tasks[*].for_each_task.task.notebook_task.source": {"GIT", "WORKSPACE"}, + "resources.jobs.*.tasks[*].for_each_task.task.power_bi_task.power_bi_model.authentication_method": {"OAUTH", "PAT"}, + "resources.jobs.*.tasks[*].for_each_task.task.power_bi_task.power_bi_model.storage_mode": {"DIRECT_QUERY", "DUAL", "IMPORT"}, + "resources.jobs.*.tasks[*].for_each_task.task.power_bi_task.tables[*].storage_mode": {"DIRECT_QUERY", "DUAL", "IMPORT"}, + "resources.jobs.*.tasks[*].for_each_task.task.run_if": {"ALL_DONE", 
"ALL_FAILED", "ALL_SUCCESS", "AT_LEAST_ONE_FAILED", "AT_LEAST_ONE_SUCCESS", "NONE_FAILED"}, + "resources.jobs.*.tasks[*].for_each_task.task.spark_python_task.source": {"GIT", "WORKSPACE"}, + "resources.jobs.*.tasks[*].for_each_task.task.sql_task.file.source": {"GIT", "WORKSPACE"}, + "resources.jobs.*.tasks[*].gen_ai_compute_task.source": {"GIT", "WORKSPACE"}, + "resources.jobs.*.tasks[*].health.rules[*].metric": {"RUN_DURATION_SECONDS", "STREAMING_BACKLOG_BYTES", "STREAMING_BACKLOG_FILES", "STREAMING_BACKLOG_RECORDS", "STREAMING_BACKLOG_SECONDS"}, + "resources.jobs.*.tasks[*].health.rules[*].op": {"GREATER_THAN"}, + "resources.jobs.*.tasks[*].new_cluster.aws_attributes.availability": {"ON_DEMAND", "SPOT", "SPOT_WITH_FALLBACK"}, + "resources.jobs.*.tasks[*].new_cluster.aws_attributes.ebs_volume_type": {"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"}, + "resources.jobs.*.tasks[*].new_cluster.azure_attributes.availability": {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"}, + "resources.jobs.*.tasks[*].new_cluster.data_security_mode": {"DATA_SECURITY_MODE_AUTO", "DATA_SECURITY_MODE_DEDICATED", "DATA_SECURITY_MODE_STANDARD", "LEGACY_PASSTHROUGH", "LEGACY_SINGLE_USER", "LEGACY_SINGLE_USER_STANDARD", "LEGACY_TABLE_ACL", "NONE", "SINGLE_USER", "USER_ISOLATION"}, + "resources.jobs.*.tasks[*].new_cluster.gcp_attributes.availability": {"ON_DEMAND_GCP", "PREEMPTIBLE_GCP", "PREEMPTIBLE_WITH_FALLBACK_GCP"}, + "resources.jobs.*.tasks[*].new_cluster.kind": {"CLASSIC_PREVIEW"}, + "resources.jobs.*.tasks[*].new_cluster.runtime_engine": {"NULL", "PHOTON", "STANDARD"}, + "resources.jobs.*.tasks[*].notebook_task.source": {"GIT", "WORKSPACE"}, + "resources.jobs.*.tasks[*].power_bi_task.power_bi_model.authentication_method": {"OAUTH", "PAT"}, + "resources.jobs.*.tasks[*].power_bi_task.power_bi_model.storage_mode": {"DIRECT_QUERY", "DUAL", "IMPORT"}, + "resources.jobs.*.tasks[*].power_bi_task.tables[*].storage_mode": {"DIRECT_QUERY", "DUAL", "IMPORT"}, + "resources.jobs.*.tasks[*].run_if": {"ALL_DONE", "ALL_FAILED", "ALL_SUCCESS", "AT_LEAST_ONE_FAILED", "AT_LEAST_ONE_SUCCESS", "NONE_FAILED"}, + "resources.jobs.*.tasks[*].spark_python_task.source": {"GIT", "WORKSPACE"}, + "resources.jobs.*.tasks[*].sql_task.file.source": {"GIT", "WORKSPACE"}, + "resources.jobs.*.trigger.pause_status": {"PAUSED", "UNPAUSED"}, + "resources.jobs.*.trigger.periodic.unit": {"DAYS", "HOURS", "WEEKS"}, + "resources.jobs.*.trigger.table.condition": {"ALL_UPDATED", "ANY_UPDATED"}, + "resources.jobs.*.trigger.table_update.condition": {"ALL_UPDATED", "ANY_UPDATED"}, + + "resources.model_serving_endpoints.*.ai_gateway.guardrails.input.pii.behavior": {"BLOCK", "NONE"}, + "resources.model_serving_endpoints.*.ai_gateway.guardrails.output.pii.behavior": {"BLOCK", "NONE"}, + "resources.model_serving_endpoints.*.ai_gateway.rate_limits[*].key": {"endpoint", "user"}, + "resources.model_serving_endpoints.*.ai_gateway.rate_limits[*].renewal_period": {"minute"}, + "resources.model_serving_endpoints.*.config.served_entities[*].external_model.amazon_bedrock_config.bedrock_provider": {"ai21labs", "amazon", "anthropic", "cohere"}, + "resources.model_serving_endpoints.*.config.served_entities[*].external_model.provider": {"ai21labs", "amazon-bedrock", "anthropic", "cohere", "custom", "databricks-model-serving", "google-cloud-vertex-ai", "openai", "palm"}, + "resources.model_serving_endpoints.*.config.served_entities[*].workload_type": {"CPU", "GPU_LARGE", "GPU_MEDIUM", "GPU_SMALL", "MULTIGPU_MEDIUM"}, + 
"resources.model_serving_endpoints.*.config.served_models[*].workload_type": {"CPU", "GPU_LARGE", "GPU_MEDIUM", "GPU_SMALL", "MULTIGPU_MEDIUM"}, + "resources.model_serving_endpoints.*.rate_limits[*].key": {"endpoint", "user"}, + "resources.model_serving_endpoints.*.rate_limits[*].renewal_period": {"minute"}, + + "resources.pipelines.*.clusters[*].autoscale.mode": {"ENHANCED", "LEGACY"}, + "resources.pipelines.*.clusters[*].aws_attributes.availability": {"ON_DEMAND", "SPOT", "SPOT_WITH_FALLBACK"}, + "resources.pipelines.*.clusters[*].aws_attributes.ebs_volume_type": {"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"}, + "resources.pipelines.*.clusters[*].azure_attributes.availability": {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"}, + "resources.pipelines.*.clusters[*].gcp_attributes.availability": {"ON_DEMAND_GCP", "PREEMPTIBLE_GCP", "PREEMPTIBLE_WITH_FALLBACK_GCP"}, + "resources.pipelines.*.deployment.kind": {"BUNDLE"}, + "resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.scd_type": {"SCD_TYPE_1", "SCD_TYPE_2"}, + "resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.scd_type": {"SCD_TYPE_1", "SCD_TYPE_2"}, + "resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.scd_type": {"SCD_TYPE_1", "SCD_TYPE_2"}, + "resources.pipelines.*.ingestion_definition.source_type": {"DYNAMICS365", "GA4_RAW_DATA", "MANAGED_POSTGRESQL", "MYSQL", "NETSUITE", "ORACLE", "POSTGRESQL", "SALESFORCE", "SERVICENOW", "SHAREPOINT", "SQLSERVER", "TERADATA", "WORKDAY_RAAS"}, + "resources.pipelines.*.ingestion_definition.table_configuration.scd_type": {"SCD_TYPE_1", "SCD_TYPE_2"}, + "resources.pipelines.*.restart_window.days_of_week[*]": {"FRIDAY", "MONDAY", "SATURDAY", "SUNDAY", "THURSDAY", "TUESDAY", "WEDNESDAY"}, + + "resources.quality_monitors.*.custom_metrics[*].type": {"CUSTOM_METRIC_TYPE_AGGREGATE", "CUSTOM_METRIC_TYPE_DERIVED", "CUSTOM_METRIC_TYPE_DRIFT"}, + "resources.quality_monitors.*.inference_log.problem_type": {"PROBLEM_TYPE_CLASSIFICATION", "PROBLEM_TYPE_REGRESSION"}, + "resources.quality_monitors.*.schedule.pause_status": {"PAUSED", "UNPAUSED"}, + + "resources.secret_scopes.*.backend_type": {"AZURE_KEYVAULT", "DATABRICKS"}, + + "resources.volumes.*.volume_type": {"EXTERNAL", "MANAGED"}, +} diff --git a/bundle/internal/validation/generated/required_fields.go b/bundle/internal/validation/generated/required_fields.go index 58f3dec056..ba37a0a895 100644 --- a/bundle/internal/validation/generated/required_fields.go +++ b/bundle/internal/validation/generated/required_fields.go @@ -43,7 +43,6 @@ var RequiredFields = map[string][]string{ "resources.jobs.*.deployment": {"kind"}, "resources.jobs.*.environments[*]": {"environment_key"}, - "resources.jobs.*.environments[*].spec": {"client"}, "resources.jobs.*.git_source": {"git_provider", "git_url"}, "resources.jobs.*.git_source.job_source": {"import_from_git_branch", "job_config_path"}, "resources.jobs.*.health.rules[*]": {"metric", "op", "value"}, diff --git a/bundle/internal/validation/main.go b/bundle/internal/validation/main.go index 8f5d64a393..0b8f5645f8 100644 --- a/bundle/internal/validation/main.go +++ b/bundle/internal/validation/main.go @@ -6,7 +6,12 @@ import ( // This package is meant to be run from the root of the CLI repo. 
func main() { - err := generateRequiredFields("bundle/internal/validation/generated") + // err := generateRequiredFields("bundle/internal/validation/generated") + // if err != nil { + // log.Fatalf("Error generating code: %v", err) + // } + + err := generateEnumFields("bundle/internal/validation/generated") if err != nil { log.Fatalf("Error generating code: %v", err) } diff --git a/bundle/internal/validation/required.go b/bundle/internal/validation/required.go index e7fa4ae365..697de710c6 100644 --- a/bundle/internal/validation/required.go +++ b/bundle/internal/validation/required.go @@ -26,16 +26,15 @@ type RequiredPatternInfo struct { RequiredFields string } -// formatRequiredFields formats a list of field names into string of the form `{field1, field2, ...}` -// representing a Go slice literal. -func formatRequiredFields(fields []string) string { - if len(fields) == 0 { +// formatValues formats a list of strings into a Go slice literal string of the form `{value1, value2, ...}` +func formatValues(values []string) string { + if len(values) == 0 { return "{}" } var quoted []string - for _, field := range fields { - quoted = append(quoted, fmt.Sprintf("%q", field)) + for _, value := range values { + quoted = append(quoted, fmt.Sprintf("%q", value)) } return "{" + strings.Join(quoted, ", ") + "}" @@ -81,7 +80,7 @@ func buildPatternInfos(fieldsByPattern map[string][]string) []RequiredPatternInf for parentPath, fields := range fieldsByPattern { patterns = append(patterns, RequiredPatternInfo{ Parent: parentPath, - RequiredFields: formatRequiredFields(fields), + RequiredFields: formatValues(fields), }) } diff --git a/go.mod b/go.mod index 499a73557b..bbe525a590 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/BurntSushi/toml v1.5.0 // MIT github.com/Masterminds/semver/v3 v3.3.1 // MIT github.com/briandowns/spinner v1.23.1 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.71.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.72.0 // Apache 2.0 github.com/fatih/color v1.18.0 // MIT github.com/google/uuid v1.6.0 // BSD-3-Clause github.com/gorilla/mux v1.8.1 // BSD 3-Clause @@ -28,21 +28,21 @@ require ( github.com/spf13/cobra v1.9.1 // Apache 2.0 github.com/spf13/pflag v1.0.6 // BSD-3-Clause github.com/stretchr/testify v1.10.0 // MIT - golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 - golang.org/x/mod v0.24.0 + golang.org/x/exp v0.0.0-20250606033433-dcc06ee1d476 + golang.org/x/mod v0.25.0 golang.org/x/oauth2 v0.30.0 - golang.org/x/sync v0.14.0 + golang.org/x/sync v0.15.0 golang.org/x/sys v0.33.0 golang.org/x/term v0.32.0 - golang.org/x/text v0.25.0 + golang.org/x/text v0.26.0 gopkg.in/ini.v1 v1.67.0 // Apache 2.0 gopkg.in/yaml.v3 v3.0.1 ) require ( - cloud.google.com/go/auth v0.4.2 // indirect - cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect - cloud.google.com/go/compute/metadata v0.5.2 // indirect + cloud.google.com/go/auth v0.16.2 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/compute/metadata v0.7.0 // indirect github.com/ProtonMail/go-crypto v1.1.6 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/bitfield/gotestdox v0.2.2 // indirect @@ -53,39 +53,41 @@ require ( github.com/dnephin/pflag v1.0.7 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.8.0 // indirect - github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/groupcache 
v0.0.0-20241129210726-2c02b8208cf8 // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/google/go-cmp v0.7.0 // indirect github.com/google/go-querystring v1.1.0 // indirect - github.com/google/s2a-go v0.1.7 // indirect + github.com/google/s2a-go v0.1.9 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect github.com/google/yamlfmt v0.17.0 // indirect - github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect + github.com/googleapis/gax-go/v2 v2.14.2 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-retryablehttp v0.7.7 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/kr/pretty v0.1.0 // indirect + github.com/kr/pretty v0.3.1 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/zclconf/go-cty v1.16.2 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect - go.opentelemetry.io/otel v1.31.0 // indirect - go.opentelemetry.io/otel/metric v1.31.0 // indirect - go.opentelemetry.io/otel/trace v1.31.0 // indirect - golang.org/x/crypto v0.36.0 // indirect - golang.org/x/net v0.38.0 // indirect - golang.org/x/time v0.5.0 // indirect - golang.org/x/tools v0.31.0 // indirect - google.golang.org/api v0.182.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20241015192408-796eee8c2d53 // indirect - google.golang.org/grpc v1.69.4 // indirect - google.golang.org/protobuf v1.36.3 // indirect - gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect + go.opentelemetry.io/otel v1.36.0 // indirect + go.opentelemetry.io/otel/metric v1.36.0 // indirect + go.opentelemetry.io/otel/trace v1.36.0 // indirect + golang.org/x/crypto v0.39.0 // indirect + golang.org/x/net v0.41.0 // indirect + golang.org/x/time v0.12.0 // indirect + golang.org/x/tools v0.34.0 // indirect + google.golang.org/api v0.236.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect + google.golang.org/grpc v1.73.0 // indirect + google.golang.org/protobuf v1.36.6 // indirect + gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gotest.tools/gotestsum v1.12.1 // indirect ) diff --git a/go.sum b/go.sum index 06323b68bc..da8fff3a27 100644 --- a/go.sum +++ b/go.sum @@ -1,10 +1,16 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go/auth v0.4.2 h1:sb0eyLkhRtpq5jA+a8KWw0W70YcdVca7KJ8TM0AFYDg= cloud.google.com/go/auth v0.4.2/go.mod h1:Kqvlz1cf1sNA0D+sYJnkPQOP+JMHkuHeIgVmCRtZOLc= +cloud.google.com/go/auth v0.16.2 h1:QvBAGFPLrDeoiNjyfVunhQ10HKNYuOwZ5noee0M5df4= +cloud.google.com/go/auth v0.16.2/go.mod h1:sRBas2Y1fB1vZTdurouM0AzuYQBMZinrUYL8EufhtEA= cloud.google.com/go/auth/oauth2adapt v0.2.2 h1:+TTV8aXpjeChS9M+aTtN/TjdQnzJvmzKFt//oWu7HX4= cloud.google.com/go/auth/oauth2adapt v0.2.2/go.mod h1:wcYjgpZI9+Yu7LyYBg4pqSiaRkfEK3GQcpb7C/uyF1Q= +cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= +cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= 
cloud.google.com/go/compute/metadata v0.5.2 h1:UxK4uu/Tn+I3p2dYWTfiX4wva7aYlKixAHn3fyqngqo= cloud.google.com/go/compute/metadata v0.5.2/go.mod h1:C66sj2AluDcIqakBq/M8lw8/ybHgOZqin2obFxa/E5k= +cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= +cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= @@ -36,10 +42,13 @@ github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= github.com/databricks/databricks-sdk-go v0.71.0 h1:YVNcvQUcgzlKesxDolDXSQPbNcCldubYLvM71hzVmUY= github.com/databricks/databricks-sdk-go v0.71.0/go.mod h1:xBtjeP9nq+6MgTewZW1EcbRkD7aDY9gZvcRPcwPhZjw= +github.com/databricks/databricks-sdk-go v0.72.0 h1:vNS4zlpvNYiXsy/7/lzV7cuu/yOcT/1xpfuJw3+W3TA= +github.com/databricks/databricks-sdk-go v0.72.0/go.mod h1:xBtjeP9nq+6MgTewZW1EcbRkD7aDY9gZvcRPcwPhZjw= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -67,6 +76,8 @@ github.com/go-git/go-git/v5 v5.14.0/go.mod h1:Z5Xhoia5PcWA3NF8vRLURn9E5FRhSl7dGj github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= @@ -98,6 +109,8 @@ github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= +github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= +github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= github.com/google/uuid v1.1.2/go.mod 
h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -107,8 +120,12 @@ github.com/google/yamlfmt v0.17.0 h1:/tdp01rIlvLz3LgJ2NtMLnqgAadZm33P7GcPU680b+w github.com/google/yamlfmt v0.17.0/go.mod h1:gs0UEklJOYkUJ+OOCG0hg9n+DzucKDPlJElTUasVNK8= github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= +github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= github.com/googleapis/gax-go/v2 v2.12.4 h1:9gWcmF85Wvq4ryPFvGFaOgPIs1AQX0d0bcbGw4Z96qg= github.com/googleapis/gax-go/v2 v2.12.4/go.mod h1:KYEYLorsnIGDi/rPC8b5TdlB9kbKoFubselGIoBMCwI= +github.com/googleapis/gax-go/v2 v2.14.2 h1:eBLnkZ9635krYIPD+ag1USrOAI0Nr0QYF3+/3GqO0k0= +github.com/googleapis/gax-go/v2 v2.14.2/go.mod h1:ON64QhlJkhVtSqp4v1uaK92VyZ2gmvDQsweuyLV+8+w= github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= @@ -137,9 +154,13 @@ github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4 github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= @@ -158,11 +179,13 @@ github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE= github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/rogpeppe/go-internal v1.9.0/go.mod 
h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= @@ -193,32 +216,51 @@ github.com/zclconf/go-cty v1.16.2 h1:LAJSwc3v81IRBZyUVQDUdZ7hs3SYs9jv0eZJDWHD/70 github.com/zclconf/go-cty v1.16.2/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q= go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY= go.opentelemetry.io/otel v1.31.0/go.mod h1:O0C14Yl9FgkjqcCZAsE053C13OaddMYr/hz6clDkEJE= +go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= +go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE= go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY= +go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= +go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= go.opentelemetry.io/otel/sdk v1.31.0 h1:xLY3abVHYZ5HSfOg3l2E5LUj2Cwva5Y7yGxnSW9H5Gk= go.opentelemetry.io/otel/sdk v1.31.0/go.mod h1:TfRbMdhvxIIr/B2N2LQW2S5v9m3gOQ/08KsbbO5BPT0= +go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs= go.opentelemetry.io/otel/sdk/metric v1.31.0 h1:i9hxxLJF/9kkvfHppyLL55aW7iIJz4JjxTeYusH7zMc= go.opentelemetry.io/otel/sdk/metric v1.31.0/go.mod h1:CRInTMVvNhUKgSAMbKyTMxqOBC0zgyxzW55lZzX43Y8= +go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis= go.opentelemetry.io/otel/trace v1.31.0 h1:ffjsj1aRouKewfr85U2aGagJ46+MvodynlQ1HYdmJys= go.opentelemetry.io/otel/trace v1.31.0/go.mod h1:TXZkRk7SM2ZQLtR6eoAWQFIHPvzQ06FJAsO1tJg480A= +go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= +go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= +golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ= golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= +golang.org/x/exp v0.0.0-20250606033433-dcc06ee1d476 h1:bsqhLWFR6G6xiQcb+JoGqdKdRU6WzPWmK8E0jxTjzo4= +golang.org/x/exp v0.0.0-20250606033433-dcc06ee1d476/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= +golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -227,6 +269,8 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= +golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= @@ -235,6 +279,8 @@ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= +golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -253,8 +299,12 @@ golang.org/x/text v0.3.0/go.mod 
h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= +golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= +golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -262,9 +312,13 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3 golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU= golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ= +golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo= +golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.182.0 h1:if5fPvudRQ78GeRx3RayIoiuV7modtErPIZC/T2bIvE= google.golang.org/api v0.182.0/go.mod h1:cGhjy4caqA5yXRzEhkHI8Y9mfyC2VLTlER2l08xaqtM= +google.golang.org/api v0.236.0 h1:CAiEiDVtO4D/Qja2IA9VzlFrgPnK3XVMmRoJZlSWbc0= +google.golang.org/api v0.236.0/go.mod h1:X1WF9CU2oTc+Jml1tiIxGmWFK/UZezdqEu09gcxZAj4= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= @@ -272,6 +326,8 @@ google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98 google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto/googleapis/rpc v0.0.0-20241015192408-796eee8c2d53 h1:X58yt85/IXCx0Y3ZwN6sEIKZzQtDEYaBWrDvErdXrRE= google.golang.org/genproto/googleapis/rpc v0.0.0-20241015192408-796eee8c2d53/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 h1:fc6jSaCT0vBduLYZHYrBBNY4dsWuvgyff9noRNDdBeE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= @@ -279,6 +335,8 @@ google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8 google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.69.4 
h1:MF5TftSMkd8GLw/m0KM6V8CMOCY6NZ1NQDPGFgbTt4A= google.golang.org/grpc v1.69.4/go.mod h1:vyjdE6jLBI76dgpDojsFGNaHlxdjXN9ghpnd2o7JGZ4= +google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok= +google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -290,9 +348,12 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.36.3 h1:82DV7MYdb8anAVi3qge1wSnMDrnKK7ebr+I0hHRN1BU= google.golang.org/protobuf v1.36.3/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= From 43d08e6ed57e14cea6fb39bc0c79b7197d06d013 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 12 Jun 2025 15:39:27 +0200 Subject: [PATCH 08/22] minor cleanup --- bundle/internal/validation/enum.go | 17 +---------------- bundle/internal/validation/main.go | 10 +++++----- bundle/internal/validation/required.go | 8 ++++---- 3 files changed, 10 insertions(+), 25 deletions(-) diff --git a/bundle/internal/validation/enum.go b/bundle/internal/validation/enum.go index 7c88e5093c..8a0f0e67c8 100644 --- a/bundle/internal/validation/enum.go +++ b/bundle/internal/validation/enum.go @@ -7,7 +7,6 @@ import ( "path/filepath" "reflect" "sort" - "strings" "text/template" "github.com/databricks/cli/bundle/config" @@ -127,26 +126,12 @@ func extractEnumFields(typ reflect.Type) ([]EnumPatternInfo, error) { return patterns, nil } -// getEnumGroupingKey determines the grouping key for organizing patterns -// TODO: Combine with the required function. -func getEnumGroupingKey(pattern string) string { - parts := strings.Split(pattern, ".") - - // Group resources by their resource type (e.g., "resources.jobs") - if parts[0] == "resources" && len(parts) > 1 { - return parts[0] + "." 
+ parts[1] - } - - // Use the top level key for other fields - return parts[0] -} - // groupEnumPatternsByKey groups patterns by their logical grouping key func groupEnumPatternsByKey(patterns []EnumPatternInfo) map[string][]EnumPatternInfo { groupedPatterns := make(map[string][]EnumPatternInfo) for _, pattern := range patterns { - key := getEnumGroupingKey(pattern.Pattern) + key := getPatternGroupingKey(pattern.Pattern) groupedPatterns[key] = append(groupedPatterns[key], pattern) } diff --git a/bundle/internal/validation/main.go b/bundle/internal/validation/main.go index 0b8f5645f8..797e5f7e08 100644 --- a/bundle/internal/validation/main.go +++ b/bundle/internal/validation/main.go @@ -6,12 +6,12 @@ import ( // This package is meant to be run from the root of the CLI repo. func main() { - // err := generateRequiredFields("bundle/internal/validation/generated") - // if err != nil { - // log.Fatalf("Error generating code: %v", err) - // } + err := generateRequiredFields("bundle/internal/validation/generated") + if err != nil { + log.Fatalf("Error generating code: %v", err) + } - err := generateEnumFields("bundle/internal/validation/generated") + err = generateEnumFields("bundle/internal/validation/generated") if err != nil { log.Fatalf("Error generating code: %v", err) } diff --git a/bundle/internal/validation/required.go b/bundle/internal/validation/required.go index 697de710c6..84f0a93d44 100644 --- a/bundle/internal/validation/required.go +++ b/bundle/internal/validation/required.go @@ -87,9 +87,9 @@ func buildPatternInfos(fieldsByPattern map[string][]string) []RequiredPatternInf return patterns } -// getGroupingKey determines the grouping key for organizing patterns -func getGroupingKey(parentPath string) string { - parts := strings.Split(parentPath, ".") +// getPatternGroupingKey determines the grouping key for organizing patterns +func getPatternGroupingKey(patternPath string) string { + parts := strings.Split(patternPath, ".") // Group resources by their resource type (e.g., "resources.jobs") if parts[0] == "resources" && len(parts) > 1 { @@ -105,7 +105,7 @@ func groupPatternsByKey(patterns []RequiredPatternInfo) map[string][]RequiredPat groupedPatterns := make(map[string][]RequiredPatternInfo) for _, pattern := range patterns { - key := getGroupingKey(pattern.Parent) + key := getPatternGroupingKey(pattern.Parent) groupedPatterns[key] = append(groupedPatterns[key], pattern) } From 4883c88da379726b6e20b03f49c8973f71365543 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 12 Jun 2025 15:52:58 +0200 Subject: [PATCH 09/22] Validate enum bundle fields --- bundle/config/validate/enum.go | 88 ++++++++++++++++++++++++++++++++++ 1 file changed, 88 insertions(+) create mode 100644 bundle/config/validate/enum.go diff --git a/bundle/config/validate/enum.go b/bundle/config/validate/enum.go new file mode 100644 index 0000000000..3a9ed750d0 --- /dev/null +++ b/bundle/config/validate/enum.go @@ -0,0 +1,88 @@ +package validate + +import ( + "context" + "fmt" + "sort" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/internal/validation/generated" + "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" +) + +type enum struct{} + +func Enum() bundle.Mutator { + return &enum{} +} + +func (f *enum) Name() string { + return "validate:enum" +} + +func (f *enum) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + diags := diag.Diagnostics{} + + for k, validValues := range generated.EnumFields { + pattern, err := dyn.NewPatternFromString(k) 
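+ // NOTE: each key in generated.EnumFields is a dyn pattern string such as
+ // "resources.jobs.*.tasks[*].run_if"; NewPatternFromString parses that
+ // dotted/indexed syntax into the dyn.Pattern matched against the config below.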
+ if err != nil {
+ return diag.FromErr(fmt.Errorf("invalid pattern %q for enum field validation: %w", k, err))
+ }
+
+ _, err = dyn.MapByPattern(b.Config.Value(), pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
+ // Skip validation if the value is not set
+ if v.Kind() == dyn.KindInvalid || v.Kind() == dyn.KindNil {
+ return v, nil
+ }
+
+ // Get the string value for comparison
+ strValue, ok := v.AsString()
+ if !ok {
+ return v, nil
+ }
+
+ // Check if the value is in the list of valid enum values
+ validValue := false
+ for _, valid := range validValues {
+ if strValue == valid {
+ validValue = true
+ break
+ }
+ }
+
+ if !validValue {
+ diags = diags.Append(diag.Diagnostic{
+ Severity: diag.Warning,
+ Summary: fmt.Sprintf("invalid value %q for enum field. Valid values are %v", strValue, validValues),
+ Locations: v.Locations(),
+ Paths: []dyn.Path{p},
+ })
+ }
+
+ return v, nil
+ })
+ if dyn.IsExpectedMapError(err) || dyn.IsExpectedSequenceError(err) || dyn.IsExpectedMapToIndexError(err) || dyn.IsExpectedSequenceToIndexError(err) {
+ // No map or sequence value is set at this pattern, so we ignore it.
+ continue
+ }
+ if err != nil {
+ return diag.FromErr(err)
+ }
+ }
+
+ // Sort diagnostics to make them deterministic
+ sort.Slice(diags, func(i, j int) bool {
+ // First sort by summary
+ if diags[i].Summary != diags[j].Summary {
+ return diags[i].Summary < diags[j].Summary
+ }
+
+ // Then sort by locations as a tie breaker if summaries are the same.
+ iLocs := fmt.Sprintf("%v", diags[i].Locations)
+ jLocs := fmt.Sprintf("%v", diags[j].Locations)
+ return iLocs < jLocs
+ })
+
+ return diags
+}

From 4185e7ed79af2c0c86ca413f1bdeba77082e1116 Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Fri, 25 Jul 2025 17:01:57 +0200
Subject: [PATCH 10/22] add test

---
 .../bundle/validate/enum/databricks.yml | 56 +++++++++++++
 acceptance/bundle/validate/enum/out.test.toml | 5 ++
 acceptance/bundle/validate/enum/output.txt | 41 +++++++++
 acceptance/bundle/validate/enum/script | 1 +
 bundle/config/validate/enum.go | 84 +++++++++++--------
 bundle/phases/initialize.go | 3 +
 go.sum | 2 -
 7 files changed, 156 insertions(+), 36 deletions(-)
 create mode 100644 acceptance/bundle/validate/enum/databricks.yml
 create mode 100644 acceptance/bundle/validate/enum/out.test.toml
 create mode 100644 acceptance/bundle/validate/enum/output.txt
 create mode 100644 acceptance/bundle/validate/enum/script

diff --git a/acceptance/bundle/validate/enum/databricks.yml b/acceptance/bundle/validate/enum/databricks.yml
new file mode 100644
index 0000000000..19a8df8b34
--- /dev/null
+++ b/acceptance/bundle/validate/enum/databricks.yml
@@ -0,0 +1,56 @@
+variables:
+  my_variable:
+    type: "complex"
+    default:
+      key: "value"
+
+  my_variable_invalid:
+    type: "INVALID_TYPE"
+    default: "value"
+
+  my_variable_type_missing:
+    default: "value"
+
+  my_variable_type_empty:
+    type: ""
+    default: "value"
+
+resources:
+  jobs:
+    my_job_valid:
+      tasks:
+        - task_key: "task1"
+          # Valid enum value
+          run_if: "ALL_SUCCESS"
+          notebook_task:
+            # Valid enum value
+            source: "GIT"
+            notebook_path: "/path/to/notebook"
+          new_cluster:
+            # Valid enum values
+            runtime_engine: "PHOTON"
+            data_security_mode: "SINGLE_USER"
+            aws_attributes:
+              availability: "ON_DEMAND"
+              ebs_volume_type: "GENERAL_PURPOSE_SSD"
+            node_type_id: "i3.xlarge"
+            num_workers: 1
+
+    my_job_invalid:
+      tasks:
+        - task_key: "task2"
+          # Invalid enum value - should trigger warning
+          run_if: "INVALID_CONDITION"
+          notebook_task:
+            # Invalid enum value - should trigger warning
+            source: "INVALID_SOURCE"
+            notebook_path: "/path/to/notebook"
+          new_cluster:
+            # Invalid enum values - should trigger warnings
+            runtime_engine: "INVALID_ENGINE"
+            data_security_mode: "INVALID_MODE"
+            aws_attributes:
+              availability: "INVALID_AVAILABILITY"
+              ebs_volume_type: "INVALID_VOLUME_TYPE"
+            node_type_id: "i3.xlarge"
+            num_workers: 1
diff --git a/acceptance/bundle/validate/enum/out.test.toml b/acceptance/bundle/validate/enum/out.test.toml
new file mode 100644
index 0000000000..8f3575be7b
--- /dev/null
+++ b/acceptance/bundle/validate/enum/out.test.toml
@@ -0,0 +1,5 @@
+Local = true
+Cloud = false
+
+[EnvMatrix]
+ DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"]
diff --git a/acceptance/bundle/validate/enum/output.txt b/acceptance/bundle/validate/enum/output.txt
new file mode 100644
index 0000000000..f02966b4f6
--- /dev/null
+++ b/acceptance/bundle/validate/enum/output.txt
@@ -0,0 +1,41 @@
+
+>>> [CLI] bundle validate
+Warning: invalid value "" for enum field. Valid values are [complex]
+ at variables.my_variable_type_empty.type
+ in databricks.yml:45:11
+
+Warning: invalid value "INVALID_AVAILABILITY" for enum field. Valid values are [ON_DEMAND SPOT SPOT_WITH_FALLBACK]
+ at resources.jobs.my_job_invalid.tasks[0].new_cluster.aws_attributes.availability
+ in databricks.yml:9:29
+
+Warning: invalid value "INVALID_CONDITION" for enum field. Valid values are [ALL_DONE ALL_FAILED ALL_SUCCESS AT_LEAST_ONE_FAILED AT_LEAST_ONE_SUCCESS NONE_FAILED]
+ at resources.jobs.my_job_invalid.tasks[0].run_if
+ in databricks.yml:18:19
+
+Warning: invalid value "INVALID_ENGINE" for enum field. Valid values are [NULL PHOTON STANDARD]
+ at resources.jobs.my_job_invalid.tasks[0].new_cluster.runtime_engine
+ in databricks.yml:14:29
+
+Warning: invalid value "INVALID_MODE" for enum field. Valid values are [DATA_SECURITY_MODE_AUTO DATA_SECURITY_MODE_DEDICATED DATA_SECURITY_MODE_STANDARD LEGACY_PASSTHROUGH LEGACY_SINGLE_USER LEGACY_SINGLE_USER_STANDARD LEGACY_TABLE_ACL NONE SINGLE_USER USER_ISOLATION]
+ at resources.jobs.my_job_invalid.tasks[0].new_cluster.data_security_mode
+ in databricks.yml:11:33
+
+Warning: invalid value "INVALID_SOURCE" for enum field. Valid values are [GIT WORKSPACE]
+ at resources.jobs.my_job_invalid.tasks[0].notebook_task.source
+ in databricks.yml:17:21
+
+Warning: invalid value "INVALID_TYPE" for enum field. Valid values are [complex]
+ at variables.my_variable_invalid.type
+ in databricks.yml:42:11
+
+Warning: invalid value "INVALID_VOLUME_TYPE" for enum field. Valid values are [GENERAL_PURPOSE_SSD THROUGHPUT_OPTIMIZED_HDD]
+ at resources.jobs.my_job_invalid.tasks[0].new_cluster.aws_attributes.ebs_volume_type
+ in databricks.yml:10:32
+
+Name: test-bundle
+Target: default
+Workspace:
+ User: [USERNAME]
+ Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle/default
+
+Found 8 warnings
diff --git a/acceptance/bundle/validate/enum/script b/acceptance/bundle/validate/enum/script
new file mode 100644
index 0000000000..5350876150
--- /dev/null
+++ b/acceptance/bundle/validate/enum/script
@@ -0,0 +1 @@
+trace $CLI bundle validate
diff --git a/bundle/config/validate/enum.go b/bundle/config/validate/enum.go
index 3a9ed750d0..c9eb7e711b 100644
--- a/bundle/config/validate/enum.go
+++ b/bundle/config/validate/enum.go
@@ -3,6 +3,7 @@ package validate
 import (
 "context"
 "fmt"
+ "slices"
 "sort"
 
 "github.com/databricks/cli/bundle"
@@ -24,51 +25,66 @@ func (f *enum) Name() string {
 func (f *enum) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 diags := diag.Diagnostics{}
 
- for k, validValues := range generated.EnumFields {
+ // Generate prefix tree for all enum fields.
+ trie := &dyn.TrieNode{}
+ for k := range generated.EnumFields {
 pattern, err := dyn.NewPatternFromString(k)
 if err != nil {
 return diag.FromErr(fmt.Errorf("invalid pattern %q for enum field validation: %w", k, err))
 }
 
- _, err = dyn.MapByPattern(b.Config.Value(), pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
- // Skip validation if the value is not set
- if v.Kind() == dyn.KindInvalid || v.Kind() == dyn.KindNil {
- return v, nil
- }
+ err = trie.Insert(pattern)
+ if err != nil {
+ return diag.FromErr(fmt.Errorf("failed to insert pattern %q into trie: %w", k, err))
+ }
+ }
 
- // Get the string value for comparison
- strValue, ok := v.AsString()
- if !ok {
- return v, nil
- }
+ err := dyn.WalkReadOnly(b.Config.Value(), func(p dyn.Path, v dyn.Value) error {
+ // If the path is not found in the prefix tree, we do not need to validate any enum
+ // fields in it.
+ pattern, ok := trie.SearchPath(p)
+ if !ok {
+ return nil
+ }
 
- // Check if the value is in the list of valid enum values
- validValue := false
- for _, valid := range validValues {
- if strValue == valid {
- validValue = true
- break
- }
- }
+ // Skip validation if the value is not set
+ if v.Kind() == dyn.KindInvalid || v.Kind() == dyn.KindNil {
+ return nil
+ }
 
- if !validValue {
- diags = diags.Append(diag.Diagnostic{
- Severity: diag.Warning,
- Summary: fmt.Sprintf("invalid value %q for enum field. Valid values are %v", strValue, validValues),
- Locations: v.Locations(),
- Paths: []dyn.Path{p},
- })
- }
+ // Get the string value for comparison
+ strValue, ok := v.AsString()
+ if !ok {
+ return nil
+ }
+
+ cloneP := slices.Clone(p)
 
- return v, nil
- })
- if dyn.IsExpectedMapError(err) || dyn.IsExpectedSequenceError(err) || dyn.IsExpectedMapToIndexError(err) || dyn.IsExpectedSequenceToIndexError(err) {
- // No map or sequence value is set at this pattern, so we ignore it.
- continue
+ // Get valid values for this pattern
+ validValues := generated.EnumFields[pattern.String()]
+
+ // Check if the value is in the list of valid enum values
+ validValue := false
+ for _, valid := range validValues {
+ if strValue == valid {
+ validValue = true
+ break
+ }
 }
- if err != nil {
- return diag.FromErr(err)
+
+ if !validValue {
+ diags = diags.Append(diag.Diagnostic{
+ Severity: diag.Warning,
+ Summary: fmt.Sprintf("invalid value %q for enum field. Valid values are %v", strValue, validValues),
+ Locations: v.Locations(),
+ Paths: []dyn.Path{cloneP},
+ })
 }
+
+ return nil
+ })
+ if err != nil {
+ return diag.FromErr(err)
 }
 
 // Sort diagnostics to make them deterministic
diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go
index 597302c38d..7a711b43f5 100644
--- a/bundle/phases/initialize.go
+++ b/bundle/phases/initialize.go
@@ -152,6 +152,9 @@ func Initialize(ctx context.Context, b *bundle.Bundle) {
 // since they can also set and modify resources.
 validate.Required(),
 
+ // Validate that all fields with enum values specified are set to a valid value.
+ validate.Enum(),
+
 // Reads (typed): b.Config.Permissions (checks if current user or their groups have CAN_MANAGE permissions)
 // Reads (typed): b.Config.Workspace.CurrentUser (gets current user information)
 // Provides diagnostic recommendations if the current deployment identity isn't explicitly granted CAN_MANAGE permissions
diff --git a/go.sum b/go.sum
index 78c16bc9b1..7c2f6e3000 100644
--- a/go.sum
+++ b/go.sum
@@ -31,7 +31,6 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
 github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0=
 github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
 github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
-github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
 github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
 github.com/databricks/databricks-sdk-go v0.75.0 h1:BIRSPmUNtkSqAywFPOIsy2Oq+C9xc+X6TAGGYpKXuBo=
@@ -130,7 +129,6 @@ github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
 github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
 github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
 github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
-github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE=
 github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU=

From fae01ce3c7934cb4cbcd12e004954f03a427314b Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Fri, 25 Jul 2025 17:05:48 +0200
Subject: [PATCH 11/22] -

---
 NEXT_CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index cf1a8462ac..c3ad418f11 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -10,5 +10,6 @@
 * Fixed auth login ignoring DATABRICKS_CONFIG_FILE environmental variable when saving profile ([#3266](https://github.com/databricks/cli/pull/3266))
 
 ### Bundles
+* Add warning when invalid value is specified for enum field ([#3050](https://github.com/databricks/cli/pull/3050))
 
 ### API Changes

From 8b09d0ba2d9ceee4f60ba46c2b5bf9ef1baee88a Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Fri, 25 Jul 2025 17:20:51 +0200
Subject: [PATCH 12/22] update test golden files

---
 .../artifacts/artifacts_dynamic_version/databricks.yml | 1 -
 .../bundle/artifacts/artifacts_dynamic_version/output.txt | 3 +--
acceptance/bundle/artifacts/shell/invalid/output.txt | 4 ++++ acceptance/bundle/debug/direct/out.stderr.txt | 1 + acceptance/bundle/debug/direct/output.txt | 2 +- acceptance/bundle/debug/tf/out.stderr.txt | 1 + acceptance/bundle/validate/volume_defaults/output.txt | 8 ++++++++ 7 files changed, 16 insertions(+), 4 deletions(-) diff --git a/acceptance/bundle/artifacts/artifacts_dynamic_version/databricks.yml b/acceptance/bundle/artifacts/artifacts_dynamic_version/databricks.yml index c2134924c2..c5ca16a560 100644 --- a/acceptance/bundle/artifacts/artifacts_dynamic_version/databricks.yml +++ b/acceptance/bundle/artifacts/artifacts_dynamic_version/databricks.yml @@ -6,5 +6,4 @@ artifacts: type: whl build: uv build --wheel second_wheel: - type: jar build: true diff --git a/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt b/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt index 8b4936fa85..9b21c7b8c6 100644 --- a/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt +++ b/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt @@ -12,7 +12,6 @@ }, "second_wheel": { "build": "true", - "path": "[TEST_TMP_DIR]", - "type": "jar" + "path": "[TEST_TMP_DIR]" } } diff --git a/acceptance/bundle/artifacts/shell/invalid/output.txt b/acceptance/bundle/artifacts/shell/invalid/output.txt index 60a69cda00..e9fa6ad465 100644 --- a/acceptance/bundle/artifacts/shell/invalid/output.txt +++ b/acceptance/bundle/artifacts/shell/invalid/output.txt @@ -1,5 +1,9 @@ >>> [CLI] bundle deploy +Warning: invalid value "invalid" for enum field. Valid values are [bash sh cmd] + at artifacts.my_artifact.executable + in databricks.yml:6:17 + Building my_artifact... Error: invalid is not supported as an artifact executable, options are: bash, sh or cmd diff --git a/acceptance/bundle/debug/direct/out.stderr.txt b/acceptance/bundle/debug/direct/out.stderr.txt index d3e3d47a6a..cd031fbb52 100644 --- a/acceptance/bundle/debug/direct/out.stderr.txt +++ b/acceptance/bundle/debug/direct/out.stderr.txt @@ -51,6 +51,7 @@ 10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(load_resources) 10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(apply_mutators) 10:07:59 Debug: Apply pid=12345 mutator=validate:required +10:07:59 Debug: Apply pid=12345 mutator=validate:enum 10:07:59 Debug: Apply pid=12345 mutator=CheckPermissions 10:07:59 Debug: Apply pid=12345 mutator=TranslatePaths 10:07:59 Debug: Apply pid=12345 mutator=PythonWrapperWarning diff --git a/acceptance/bundle/debug/direct/output.txt b/acceptance/bundle/debug/direct/output.txt index 85294af45b..26a86de7b0 100644 --- a/acceptance/bundle/debug/direct/output.txt +++ b/acceptance/bundle/debug/direct/output.txt @@ -14,7 +14,7 @@ Validation OK! 
+>>> [CLI] bundle validate --debug 10:07:59 Info: start pid=12345 version=[DEV_VERSION] args="[CLI], bundle, validate, --debug" 10:07:59 Debug: Found bundle root at [TEST_TMP_DIR] (file [TEST_TMP_DIR]/databricks.yml) pid=12345 -@@ -61,8 +63,4 @@ +@@ -62,8 +64,4 @@ 10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotateJobs 10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotatePipelines -10:07:59 Debug: Apply pid=12345 mutator=terraform.Initialize diff --git a/acceptance/bundle/debug/tf/out.stderr.txt b/acceptance/bundle/debug/tf/out.stderr.txt index 18c6a2cfb5..dab0f29379 100644 --- a/acceptance/bundle/debug/tf/out.stderr.txt +++ b/acceptance/bundle/debug/tf/out.stderr.txt @@ -49,6 +49,7 @@ 10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(load_resources) 10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(apply_mutators) 10:07:59 Debug: Apply pid=12345 mutator=validate:required +10:07:59 Debug: Apply pid=12345 mutator=validate:enum 10:07:59 Debug: Apply pid=12345 mutator=CheckPermissions 10:07:59 Debug: Apply pid=12345 mutator=TranslatePaths 10:07:59 Debug: Apply pid=12345 mutator=PythonWrapperWarning diff --git a/acceptance/bundle/validate/volume_defaults/output.txt b/acceptance/bundle/validate/volume_defaults/output.txt index ca01eb5457..60cd668c7d 100644 --- a/acceptance/bundle/validate/volume_defaults/output.txt +++ b/acceptance/bundle/validate/volume_defaults/output.txt @@ -34,6 +34,14 @@ Warning: required field "schema_name" is not set at resources.volumes.v2 in databricks.yml:8:7 +Warning: invalid value "" for enum field. Valid values are [EXTERNAL MANAGED] + at resources.volumes.v1.volume_type + in databricks.yml:6:20 + +Warning: invalid value "already-set" for enum field. Valid values are [EXTERNAL MANAGED] + at resources.volumes.v2.volume_type + in databricks.yml:8:20 + { "v1": { "volume_type": "" From a788da5d762fcc94b8064302b14f7badb298a6fd Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 25 Jul 2025 17:26:17 +0200 Subject: [PATCH 13/22] - --- NEXT_CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index c3ad418f11..865ce58539 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -10,6 +10,6 @@ * Fixed auth login ignoring DATABRICKS_CONFIG_FILE environmental variable when saving profile ([#3266](https://github.com/databricks/cli/pull/3266)) ### Bundles -* Add warning when invalid value is specified for enum field ([#3050](https://github.com/databricks/cli/pull/3050)) +* Add warning when an invalid value is specified for enum field ([#3050](https://github.com/databricks/cli/pull/3050)) ### API Changes From e66cf3383056da0b2fcb4de473f129fc4064c334 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 6 Aug 2025 11:29:51 +0200 Subject: [PATCH 14/22] address comments 1 --- acceptance/bundle/debug/direct/output.txt | 2 +- acceptance/bundle/validate/enum/output.txt | 6 +----- bundle/config/validate/enum.go | 10 +++------- 3 files changed, 5 insertions(+), 13 deletions(-) diff --git a/acceptance/bundle/debug/direct/output.txt b/acceptance/bundle/debug/direct/output.txt index 26a86de7b0..ddb7505f89 100644 --- a/acceptance/bundle/debug/direct/output.txt +++ b/acceptance/bundle/debug/direct/output.txt @@ -14,7 +14,7 @@ Validation OK! 
 +>>> [CLI] bundle validate --debug
 10:07:59 Info: start pid=12345 version=[DEV_VERSION] args="[CLI], bundle, validate, --debug"
 10:07:59 Debug: Found bundle root at [TEST_TMP_DIR] (file [TEST_TMP_DIR]/databricks.yml) pid=12345
-@@ -62,8 +64,4 @@
+@@ -63,8 +65,4 @@
 10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotateJobs
 10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotatePipelines
 -10:07:59 Debug: Apply pid=12345 mutator=terraform.Initialize
diff --git a/acceptance/bundle/validate/enum/output.txt b/acceptance/bundle/validate/enum/output.txt
index f02966b4f6..2943e29b3f 100644
--- a/acceptance/bundle/validate/enum/output.txt
+++ b/acceptance/bundle/validate/enum/output.txt
@@ -1,9 +1,5 @@
 
 >>> [CLI] bundle validate
-Warning: invalid value "" for enum field. Valid values are [complex]
-  at variables.my_variable_type_empty.type
-  in databricks.yml:45:11
-
 Warning: invalid value "INVALID_AVAILABILITY" for enum field. Valid values are [ON_DEMAND SPOT SPOT_WITH_FALLBACK]
   at resources.jobs.my_job_invalid.tasks[0].new_cluster.aws_attributes.availability
   in databricks.yml:9:29
@@ -38,4 +34,4 @@ Workspace:
   User: [USERNAME]
   Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle/default
 
-Found 8 warnings
+Found 7 warnings
diff --git a/bundle/config/validate/enum.go b/bundle/config/validate/enum.go
index c9eb7e711b..cea022385e 100644
--- a/bundle/config/validate/enum.go
+++ b/bundle/config/validate/enum.go
@@ -58,19 +58,15 @@ func (f *enum) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 			return nil
 		}
 
+		// p is a slice of path components. We need to clone it before using it in diagnostics
+		// since the WalkReadOnly function will mutate it while walking the config tree.
 		cloneP := slices.Clone(p)
 
 		// Get valid values for this pattern
 		validValues := generated.EnumFields[pattern.String()]
 
 		// Check if the value is in the list of valid enum values
-		validValue := false
-		for _, valid := range validValues {
-			if strValue == valid {
-				validValue = true
-				break
-			}
-		}
+		validValue := slices.Contains(validValues, strValue)
 
 		if !validValue {
 			diags = diags.Append(diag.Diagnostic{

From 13a9bdafd3b694334941489eca0600a9fc4914c5 Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Wed, 6 Aug 2025 11:45:40 +0200
Subject: [PATCH 15/22] add benchmarks

---
 bundle/internal/bundletest/benchmark.go | 5 +++
 bundle/internal/bundletest/benchmark_test.go | 31 +++++++++++++++++++
 .../bundletest/mutator_benchmark_test.go | 20 ++++++++++++
 3 files changed, 56 insertions(+)

diff --git a/bundle/internal/bundletest/benchmark.go b/bundle/internal/bundletest/benchmark.go
index 6a6b40e4c7..1041995756 100644
--- a/bundle/internal/bundletest/benchmark.go
+++ b/bundle/internal/bundletest/benchmark.go
@@ -250,6 +250,11 @@ func Bundle(b *testing.B, numJobs int) *bundle.Bundle {
 
 	myBundle := bundle.Bundle{
 		Config: config.Root{
+			Artifacts: config.Artifacts{
+				"artifact.whl": {
+					Type: "unsupported_type",
+				},
+			},
 			Resources: config.Resources{
 				Jobs: allJobs,
 			},
diff --git a/bundle/internal/bundletest/benchmark_test.go b/bundle/internal/bundletest/benchmark_test.go
index 05999daf6f..75b121e095 100644
--- a/bundle/internal/bundletest/benchmark_test.go
+++ b/bundle/internal/bundletest/benchmark_test.go
@@ -3,6 +3,7 @@ package bundletest
 import (
 	"testing"
 
+	"github.com/databricks/cli/bundle/internal/validation/generated"
 	"github.com/databricks/cli/libs/dyn"
 	"github.com/stretchr/testify/assert"
 )
@@ -30,3 +31,33 @@ func BenchmarkWalk(b *testing.B) {
 		assert.NoError(b, err)
 	}
 }
+
+// This took 49 microseconds to run on 6th Aug 2025.
+func BenchmarkEnumPrefixTree(b *testing.B) {
+	for b.Loop() {
+		// Generate prefix tree for all enum fields.
+		trie := &dyn.TrieNode{}
+		for k := range generated.EnumFields {
+			pattern, err := dyn.NewPatternFromString(k)
+			assert.NoError(b, err)
+
+			err = trie.Insert(pattern)
+			assert.NoError(b, err)
+		}
+	}
+}
+
+// This took 15 microseconds to run on 6th Aug 2025.
+func BenchmarkRequiredPrefixTree(b *testing.B) {
+	for b.Loop() {
+		// Generate prefix tree for all required fields.
+		trie := &dyn.TrieNode{}
+		for k := range generated.RequiredFields {
+			pattern, err := dyn.NewPatternFromString(k)
+			assert.NoError(b, err)
+
+			err = trie.Insert(pattern)
+			assert.NoError(b, err)
+		}
+	}
+}
diff --git a/bundle/internal/bundletest/mutator_benchmark_test.go b/bundle/internal/bundletest/mutator_benchmark_test.go
index 035231c7d6..d49f644623 100644
--- a/bundle/internal/bundletest/mutator_benchmark_test.go
+++ b/bundle/internal/bundletest/mutator_benchmark_test.go
@@ -21,6 +21,16 @@ func benchmarkRequiredMutator(b *testing.B, numJobs int) {
 	assert.NotEmpty(b, diags)
 }
 
+func benchmarkEnumMutator(b *testing.B, numJobs int) {
+	myBundle := Bundle(b, numJobs)
+
+	var diags diag.Diagnostics
+	for b.Loop() {
+		diags = bundle.Apply(context.Background(), myBundle, validate.Enum())
+	}
+	assert.NotEmpty(b, diags)
+}
+
 func benchmarkWalkReadOnlyBaseline(b *testing.B, numJobs int) {
 	myBundle := Bundle(b, numJobs)
 
@@ -72,3 +82,13 @@ func BenchmarkValidateRequired100(b *testing.B) {
 func BenchmarkValidateRequired10(b *testing.B) {
 	benchmarkRequiredMutator(b, 10)
 }
+
+// This benchmark took 840ms to run on 6th Aug 2025.
+func BenchmarkValidateEnum10000(b *testing.B) {
+	benchmarkEnumMutator(b, 10000)
+}
+
+// This benchmark took 84ms to run on 6th Aug 2025.
+func BenchmarkValidateEnum1000(b *testing.B) {
+	benchmarkEnumMutator(b, 1000)
+}

From 2b6c55e4a8d645b86e732fc681bcd7477415d6ee Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Wed, 6 Aug 2025 11:51:03 +0200
Subject: [PATCH 16/22] address comments 2

---
 bundle/config/validate/enum.go | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/bundle/config/validate/enum.go b/bundle/config/validate/enum.go
index cea022385e..56a90c9d95 100644
--- a/bundle/config/validate/enum.go
+++ b/bundle/config/validate/enum.go
@@ -47,11 +47,6 @@ func (f *enum) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 			return nil
 		}
 
-		// Skip validation if the value is not set
-		if v.Kind() == dyn.KindInvalid || v.Kind() == dyn.KindNil {
-			return nil
-		}
-
 		// Get the string value for comparison
 		strValue, ok := v.AsString()
 		if !ok {

From 7d6076479226a8776f3346959a333ed2f4fabf15 Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Wed, 6 Aug 2025 11:52:14 +0200
Subject: [PATCH 17/22] -

---
 NEXT_CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 68296f2bad..8da4ed0f43 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -10,7 +10,7 @@
 * Show "DEFAULT" as the default profile for `databricks auth login` [#3252](https://github.com/databricks/cli/pull/3252)
 
 ### Bundles
-* Add warning when an invalid value is specified for enum field ([#3050](https://github.com/databricks/cli/pull/3050))
+* Add warning for when an invalid value is specified for an enum field ([#3050](https://github.com/databricks/cli/pull/3050))
 * Fix panic in auto completion handler for bundler run/open. Regression introduced in 0.259.0. [#3358](https://github.com/databricks/cli/pull/3358)
 * Fix error propagation in the `bundle generate dashboard` command. Regression introduced in 0.259.0. [#3354](https://github.com/databricks/cli/pull/3354)
 

From 0cdd22a81baa2868547e122cfbc757916d84831a Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Wed, 6 Aug 2025 12:36:49 +0200
Subject: [PATCH 18/22] add jar to type

---
 .../artifacts/artifacts_dynamic_version/databricks.yml | 1 +
 .../bundle/artifacts/artifacts_dynamic_version/output.txt | 7 ++++++-
 bundle/config/artifact.go | 2 ++
 3 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/acceptance/bundle/artifacts/artifacts_dynamic_version/databricks.yml b/acceptance/bundle/artifacts/artifacts_dynamic_version/databricks.yml
index c5ca16a560..c2134924c2 100644
--- a/acceptance/bundle/artifacts/artifacts_dynamic_version/databricks.yml
+++ b/acceptance/bundle/artifacts/artifacts_dynamic_version/databricks.yml
@@ -6,4 +6,5 @@ artifacts:
     type: whl
     build: uv build --wheel
   second_wheel:
+    type: jar
     build: true
diff --git a/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt b/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt
index 9b21c7b8c6..09cce670a3 100644
--- a/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt
+++ b/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt
@@ -1,3 +1,7 @@
+Warning: invalid value "jar" for enum field. Valid values are [whl]
+  at artifacts.second_wheel.type
+  in databricks.yml:7:11
+
 {
   "first_wheel": {
     "build": "uv build --wheel",
@@ -12,6 +16,7 @@
   },
   "second_wheel": {
     "build": "true",
-    "path": "[TEST_TMP_DIR]"
+    "path": "[TEST_TMP_DIR]",
+    "type": "jar"
   }
 }
diff --git a/bundle/config/artifact.go b/bundle/config/artifact.go
index 3039b55343..03529a69c2 100644
--- a/bundle/config/artifact.go
+++ b/bundle/config/artifact.go
@@ -10,6 +10,8 @@ type ArtifactType string
 
 const ArtifactPythonWheel ArtifactType = `whl`
 
+const ArtifactPythonWheelDynamicVersion = "jar"
+
 // Values returns all valid ArtifactType values
 func (ArtifactType) Values() []ArtifactType {
 	return []ArtifactType{

From 16a7af6ad4d2ecb4dcfeb1242390bfb6b0e8f74c Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Wed, 6 Aug 2025 12:39:36 +0200
Subject: [PATCH 19/22] -

---
 bundle/config/artifact.go | 3 ++-
 bundle/internal/validation/generated/enum_fields.go | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/bundle/config/artifact.go b/bundle/config/artifact.go
index 03529a69c2..cb029bcb4b 100644
--- a/bundle/config/artifact.go
+++ b/bundle/config/artifact.go
@@ -10,12 +10,13 @@ type ArtifactType string
 
 const ArtifactPythonWheel ArtifactType = `whl`
 
-const ArtifactPythonWheelDynamicVersion = "jar"
+const ArtifactJar ArtifactType = `jar`
 
 // Values returns all valid ArtifactType values
 func (ArtifactType) Values() []ArtifactType {
 	return []ArtifactType{
 		ArtifactPythonWheel,
+		ArtifactJar,
 	}
 }
 
diff --git a/bundle/internal/validation/generated/enum_fields.go b/bundle/internal/validation/generated/enum_fields.go
index cd36be8aa4..788b8ae96f 100644
--- a/bundle/internal/validation/generated/enum_fields.go
+++ b/bundle/internal/validation/generated/enum_fields.go
@@ -10,7 +10,7 @@ import (
 // EnumFields maps [dyn.Pattern] to valid enum values they should have.
 var EnumFields = map[string][]string{
 
 	"artifacts.*.executable": {"bash", "sh", "cmd"},
-	"artifacts.*.type": {"whl"},
+	"artifacts.*.type": {"whl", "jar"},
 
 	"resources.apps.*.active_deployment.mode": {"AUTO_SYNC", "SNAPSHOT"},
 	"resources.apps.*.active_deployment.status.state": {"CANCELLED", "FAILED", "IN_PROGRESS", "SUCCEEDED"},

From 293f24876dc1fd724127f74427ac904a0f1ccb5d Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Wed, 6 Aug 2025 12:46:12 +0200
Subject: [PATCH 20/22] update with tests

---
 .../bundle/artifacts/artifacts_dynamic_version/output.txt | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt b/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt
index 09cce670a3..8b4936fa85 100644
--- a/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt
+++ b/acceptance/bundle/artifacts/artifacts_dynamic_version/output.txt
@@ -1,7 +1,3 @@
-Warning: invalid value "jar" for enum field. Valid values are [whl]
-  at artifacts.second_wheel.type
-  in databricks.yml:7:11
-
 {
   "first_wheel": {
     "build": "uv build --wheel",

From c841f6e51c4911794069d0c32c2c6a171ce5fbf7 Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Fri, 8 Aug 2025 15:19:44 +0200
Subject: [PATCH 21/22] update acc tests

---
 acceptance/bundle/deploy/jobs/task-source/output.txt | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/acceptance/bundle/deploy/jobs/task-source/output.txt b/acceptance/bundle/deploy/jobs/task-source/output.txt
index 56bce29363..09995ca089 100644
--- a/acceptance/bundle/deploy/jobs/task-source/output.txt
+++ b/acceptance/bundle/deploy/jobs/task-source/output.txt
@@ -1,5 +1,9 @@
 
 >>> [CLI] bundle deploy
+Warning: invalid value "github" for enum field. Valid values are [awsCodeCommit azureDevOpsServices bitbucketCloud bitbucketServer gitHub gitHubEnterprise gitLab gitLabEnterpriseEdition]
+  at resources.jobs.git_job.git_source.git_provider
+  in databricks.yml:11:23
+
 Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/task-source/default/files...
 Deploying resources...
 Updating deployment state...

From ea7b3e23f59862c22910a6a8e4a3bb7d36a7bc51 Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Fri, 8 Aug 2025 15:22:39 +0200
Subject: [PATCH 22/22] -

---
 bundle/config/validate/enum.go | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/bundle/config/validate/enum.go b/bundle/config/validate/enum.go
index 56a90c9d95..0dacaf116c 100644
--- a/bundle/config/validate/enum.go
+++ b/bundle/config/validate/enum.go
@@ -53,10 +53,6 @@ func (f *enum) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 			return nil
 		}
 
-		// p is a slice of path components. We need to clone it before using it in diagnostics
-		// since the WalkReadOnly function will mutate it while walking the config tree.
-		cloneP := slices.Clone(p)
-
 		// Get valid values for this pattern
 		validValues := generated.EnumFields[pattern.String()]
 
@@ -64,6 +60,10 @@ func (f *enum) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 		validValue := slices.Contains(validValues, strValue)
 
 		if !validValue {
+			// p is a slice of path components. We need to clone it before using it in diagnostics
+			// since the WalkReadOnly function will mutate it while walking the config tree.
+			cloneP := slices.Clone(p)
+
 			diags = diags.Append(diag.Diagnostic{
 				Severity: diag.Warning,
 				Summary:  fmt.Sprintf("invalid value %q for enum field. Valid values are %v", strValue, validValues),