Merged
2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
@@ -1 +1 @@
59c4c0f3d5f0ef00cd5350b5674e941a7606d91a
8f5eedbc991c4f04ce1284406577b0c92d59a224
21 changes: 21 additions & 0 deletions acceptance/bundle/refschema/out.fields.txt
@@ -1550,6 +1550,13 @@ resources.jobs.*.settings.trigger.file_arrival *jobs.FileArrivalTriggerConfigura
resources.jobs.*.settings.trigger.file_arrival.min_time_between_triggers_seconds int REMOTE
resources.jobs.*.settings.trigger.file_arrival.url string REMOTE
resources.jobs.*.settings.trigger.file_arrival.wait_after_last_change_seconds int REMOTE
resources.jobs.*.settings.trigger.model *jobs.ModelTriggerConfiguration REMOTE
resources.jobs.*.settings.trigger.model.aliases []string REMOTE
resources.jobs.*.settings.trigger.model.aliases[*] string REMOTE
resources.jobs.*.settings.trigger.model.condition jobs.ModelTriggerConfigurationCondition REMOTE
resources.jobs.*.settings.trigger.model.min_time_between_triggers_seconds int REMOTE
resources.jobs.*.settings.trigger.model.securable_name string REMOTE
resources.jobs.*.settings.trigger.model.wait_after_last_change_seconds int REMOTE
resources.jobs.*.settings.trigger.pause_status jobs.PauseStatus REMOTE
resources.jobs.*.settings.trigger.periodic *jobs.PeriodicTriggerConfiguration REMOTE
resources.jobs.*.settings.trigger.periodic.interval int REMOTE
@@ -2215,6 +2222,13 @@ resources.jobs.*.trigger.file_arrival *jobs.FileArrivalTriggerConfiguration INPU
resources.jobs.*.trigger.file_arrival.min_time_between_triggers_seconds int INPUT STATE
resources.jobs.*.trigger.file_arrival.url string INPUT STATE
resources.jobs.*.trigger.file_arrival.wait_after_last_change_seconds int INPUT STATE
resources.jobs.*.trigger.model *jobs.ModelTriggerConfiguration INPUT STATE
resources.jobs.*.trigger.model.aliases []string INPUT STATE
resources.jobs.*.trigger.model.aliases[*] string INPUT STATE
resources.jobs.*.trigger.model.condition jobs.ModelTriggerConfigurationCondition INPUT STATE
resources.jobs.*.trigger.model.min_time_between_triggers_seconds int INPUT STATE
resources.jobs.*.trigger.model.securable_name string INPUT STATE
resources.jobs.*.trigger.model.wait_after_last_change_seconds int INPUT STATE
resources.jobs.*.trigger.pause_status jobs.PauseStatus INPUT STATE
resources.jobs.*.trigger.periodic *jobs.PeriodicTriggerConfiguration INPUT STATE
resources.jobs.*.trigger.periodic.interval int INPUT STATE
@@ -2890,13 +2904,16 @@ resources.pipelines.*.filters.include[*] string INPUT STATE
resources.pipelines.*.gateway_definition *pipelines.IngestionGatewayPipelineDefinition INPUT STATE
resources.pipelines.*.gateway_definition.connection_id string INPUT STATE
resources.pipelines.*.gateway_definition.connection_name string INPUT STATE
resources.pipelines.*.gateway_definition.connection_parameters *pipelines.ConnectionParameters INPUT STATE
resources.pipelines.*.gateway_definition.connection_parameters.source_catalog string INPUT STATE
resources.pipelines.*.gateway_definition.gateway_storage_catalog string INPUT STATE
resources.pipelines.*.gateway_definition.gateway_storage_name string INPUT STATE
resources.pipelines.*.gateway_definition.gateway_storage_schema string INPUT STATE
resources.pipelines.*.health pipelines.GetPipelineResponseHealth REMOTE
resources.pipelines.*.id string INPUT STATE
resources.pipelines.*.ingestion_definition *pipelines.IngestionPipelineDefinition INPUT STATE
resources.pipelines.*.ingestion_definition.connection_name string INPUT STATE
resources.pipelines.*.ingestion_definition.ingest_from_uc_foreign_catalog bool INPUT STATE
resources.pipelines.*.ingestion_definition.ingestion_gateway_id string INPUT STATE
resources.pipelines.*.ingestion_definition.netsuite_jar_path string INPUT STATE
resources.pipelines.*.ingestion_definition.objects []pipelines.IngestionConfig INPUT STATE
@@ -3185,12 +3202,15 @@ resources.pipelines.*.spec.filters.include[*] string REMOTE
resources.pipelines.*.spec.gateway_definition *pipelines.IngestionGatewayPipelineDefinition REMOTE
resources.pipelines.*.spec.gateway_definition.connection_id string REMOTE
resources.pipelines.*.spec.gateway_definition.connection_name string REMOTE
resources.pipelines.*.spec.gateway_definition.connection_parameters *pipelines.ConnectionParameters REMOTE
resources.pipelines.*.spec.gateway_definition.connection_parameters.source_catalog string REMOTE
resources.pipelines.*.spec.gateway_definition.gateway_storage_catalog string REMOTE
resources.pipelines.*.spec.gateway_definition.gateway_storage_name string REMOTE
resources.pipelines.*.spec.gateway_definition.gateway_storage_schema string REMOTE
resources.pipelines.*.spec.id string REMOTE
resources.pipelines.*.spec.ingestion_definition *pipelines.IngestionPipelineDefinition REMOTE
resources.pipelines.*.spec.ingestion_definition.connection_name string REMOTE
resources.pipelines.*.spec.ingestion_definition.ingest_from_uc_foreign_catalog bool REMOTE
resources.pipelines.*.spec.ingestion_definition.ingestion_gateway_id string REMOTE
resources.pipelines.*.spec.ingestion_definition.netsuite_jar_path string REMOTE
resources.pipelines.*.spec.ingestion_definition.objects []pipelines.IngestionConfig REMOTE
@@ -3565,6 +3585,7 @@ resources.synced_database_tables.*.spec *database.SyncedTableSpec ALL
resources.synced_database_tables.*.spec.create_database_objects_if_missing bool ALL
resources.synced_database_tables.*.spec.existing_pipeline_id string ALL
resources.synced_database_tables.*.spec.new_pipeline_spec *database.NewPipelineSpec ALL
resources.synced_database_tables.*.spec.new_pipeline_spec.budget_policy_id string ALL
resources.synced_database_tables.*.spec.new_pipeline_spec.storage_catalog string ALL
resources.synced_database_tables.*.spec.new_pipeline_spec.storage_schema string ALL
resources.synced_database_tables.*.spec.primary_key_columns []string ALL
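The new `trigger.model` fields listed above correspond to the model-update trigger that bundles can now set on a job (it is marked as a private preview in the OpenAPI annotations further down). A minimal sketch of how it might look in a databricks.yml — the job name, securable, and timing values are illustrative assumptions, not taken from this PR:

    resources:
      jobs:
        retrain_on_model_update:            # hypothetical job name
          name: retrain-on-model-update
          trigger:
            pause_status: UNPAUSED
            model:
              # Fully qualified UC model name for a model-level trigger;
              # "mycatalog.myschema" would make it a schema-level trigger.
              securable_name: mycatalog.myschema.mymodel
              # One of MODEL_CREATED, MODEL_VERSION_READY, MODEL_ALIAS_SET
              condition: MODEL_VERSION_READY
              # aliases: ["champion"]        # only valid with MODEL_ALIAS_SET
              min_time_between_triggers_seconds: 3600   # minimum allowed is 60
              wait_after_last_change_seconds: 120
          tasks:
            - task_key: retrain
              notebook_task:
                notebook_path: /Workspace/Shared/retrain   # illustrative path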
8 changes: 3 additions & 5 deletions acceptance/help/output.txt
@@ -20,13 +20,11 @@ Compute
policy-compliance-for-clusters The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.
policy-families View available policy families.

Workflows
Lakeflow
jobs The Jobs API allows you to create, edit, and delete jobs.
pipelines The Lakeflow Spark Declarative Pipelines API allows you to create, edit, delete, start, and view details about pipelines.
policy-compliance-for-jobs The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.

Delta Live Tables
pipelines The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.

Machine Learning
experiments Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment.
model-registry Note: This API reference documents APIs for the Workspace Model Registry.
@@ -86,7 +84,7 @@ Unity Catalog
quality-monitors A monitor computes and monitors data or model quality metrics for a table over time.
registered-models Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.
resource-quotas Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created.
rfa Request for Access enables customers to request access to and manage access request destinations for Unity Catalog securables.
rfa Request for Access enables users to request access for Unity Catalog securables.
schemas A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.
storage-credentials A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant.
system-schemas A system schema is a schema that lives within the system catalog.
84 changes: 63 additions & 21 deletions bundle/internal/schema/annotations_openapi.yml
@@ -450,8 +450,7 @@ github.com/databricks/cli/bundle/config/resources.Job:
"environments":
"description": |-
A list of task execution environment specifications that can be referenced by serverless tasks of this job.
An environment is required to be present for serverless tasks.
For serverless notebook tasks, the environment is accessible in the notebook environment panel.
For serverless notebook tasks, if the environment_key is not specified, the notebook environment will be used if present. If a jobs environment is specified, it will override the notebook environment.
For other serverless tasks, the task environment is required to be specified using environment_key in the task settings.
"format":
"description": |-
@@ -1467,7 +1466,7 @@ github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes:
This string will be of a form like "us-west-2a". The provided availability
zone must be in the same region as the Databricks deployment. For example, "us-west-2a"
is not a valid zone id if the Databricks deployment resides in the "us-east-1" region.
This is an optional field at cluster creation, and if not specified, a default zone will be used.
This is an optional field at cluster creation, and if not specified, the zone "auto" will be used.
If the zone specified is "auto", will try to place cluster in a zone with high availability,
and will retry placement in a different AZ if there is not enough capacity.

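As the revised description notes, leaving `zone_id` unset is now equivalent to "auto" placement. A small, hedged fragment of a cluster definition that sets it explicitly (Spark version and node type are illustrative):

    new_cluster:
      spark_version: 15.4.x-scala2.12     # illustrative
      node_type_id: i3.xlarge             # illustrative
      num_workers: 2
      aws_attributes:
        # "auto" places the cluster in a zone with capacity and retries other AZs
        zone_id: auto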
@@ -1841,8 +1840,6 @@ github.com/databricks/databricks-sdk-go/service/compute.Environment:
"java_dependencies":
"description": |-
List of java dependencies. Each dependency is a string representing a java library path. For example: `/Volumes/path/to/test.jar`.
"x-databricks-preview": |-
PRIVATE
github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes:
"_":
"description": |-
@@ -2173,6 +2170,9 @@ github.com/databricks/databricks-sdk-go/service/database.NewPipelineSpec:
"description": |-
Custom fields that user can set for pipeline while creating SyncedDatabaseTable.
Note that other fields of pipeline are still inferred by table def internally
"budget_policy_id":
"description": |-
Budget policy to set on the newly created pipeline.
"storage_catalog":
"description": |-
This field needs to be specified if the destination catalog is a managed postgres catalog.
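The new `budget_policy_id` sits under `new_pipeline_spec` of a synced database table. A partial sketch of the corresponding bundle resource (resource name, policy ID, and catalog/schema values are assumptions; other fields of the resource are omitted):

    resources:
      synced_database_tables:
        orders_synced:                      # hypothetical resource name
          spec:
            create_database_objects_if_missing: true
            primary_key_columns:
              - order_id
            new_pipeline_spec:
              # Budget policy applied to the pipeline created for this table
              budget_policy_id: 0123-456789-abcdef   # illustrative ID
              storage_catalog: main
              storage_schema: synced_tables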
@@ -2899,6 +2899,35 @@ github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules:
"description": |-
An optional set of health rules that can be defined for this job.
"rules": {}
github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfiguration:
"aliases":
"description": |-
Aliases of the model versions to monitor. Can only be used in conjunction with condition MODEL_ALIAS_SET.
"condition":
"description": |-
The condition based on which to trigger a job run.
"min_time_between_triggers_seconds":
"description": |-
If set, the trigger starts a run only after the specified amount of time has passed since
the last time the trigger fired. The minimum allowed value is 60 seconds.
"securable_name":
"description": |-
Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level triggers,
"mycatalog.myschema" in the case of schema-level triggers) or empty in the case of metastore-level triggers.
"wait_after_last_change_seconds":
"description": |-
If set, the trigger starts a run only after no model updates have occurred for the specified time
and can be used to wait for a series of model updates before triggering a run. The
minimum allowed value is 60 seconds.
github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfigurationCondition:
"_":
"enum":
- |-
MODEL_CREATED
- |-
MODEL_VERSION_READY
- |-
MODEL_ALIAS_SET
github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask:
"base_parameters":
"description": |-
@@ -3516,6 +3545,9 @@ github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings:
"file_arrival":
"description": |-
File arrival trigger settings.
"model":
"x-databricks-preview": |-
PRIVATE
"pause_status":
"description": |-
Whether this trigger is paused or not.
@@ -3564,6 +3596,15 @@ github.com/databricks/databricks-sdk-go/service/ml.ModelTag:
"value":
"description": |-
The tag value.
github.com/databricks/databricks-sdk-go/service/pipelines.ConnectionParameters:
"source_catalog":
"description": |-
Source catalog for initial connection.
This is necessary for schema exploration in some database systems like Oracle, and optional but nice-to-have
in some other database systems like Postgres.
For Oracle databases, this maps to a service name.
"x-databricks-preview": |-
PRIVATE
github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger:
"quartz_cron_schedule": {}
"timezone_id": {}
@@ -3638,21 +3679,34 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipeli
"connection_name":
"description": |-
Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
"connection_parameters":
"description": |-
Optional, Internal. Parameters required to establish an initial connection with the source.
"x-databricks-preview": |-
PRIVATE
"gateway_storage_catalog":
"description": |-
Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
"gateway_storage_name":
"description": |-
Optional. The Unity Catalog-compatible name for the gateway storage location.
This is the destination to use for the data that is extracted by the gateway.
Delta Live Tables system will automatically create the storage location under the catalog and schema.
Spark Declarative Pipelines system will automatically create the storage location under the catalog and schema.
"gateway_storage_schema":
"description": |-
Required, Immutable. The name of the schema for the gateway pipeline's storage location.
github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition:
"connection_name":
"description": |-
Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.
"ingest_from_uc_foreign_catalog":
"description": |-
Immutable. If set to true, the pipeline will ingest tables from the
UC foreign catalogs directly without the need to specify a UC connection or ingestion gateway.
The `source_catalog` fields in objects of IngestionConfig are interpreted as
the UC foreign catalogs to ingest from.
"x-databricks-preview": |-
PRIVATE
"ingestion_gateway_id":
"description": |-
Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.
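Two of the additions in this hunk — `connection_parameters` on the gateway definition and `ingest_from_uc_foreign_catalog` on the ingestion definition — are both flagged as private preview. A rough sketch of how they might appear on pipeline resources; connection names, catalogs, and the table object shape are illustrative assumptions:

    resources:
      pipelines:
        oracle_gateway:                         # hypothetical gateway pipeline
          name: oracle-gateway
          gateway_definition:
            connection_name: my_oracle_connection
            gateway_storage_catalog: main
            gateway_storage_schema: oracle_gateway
            connection_parameters:
              # For Oracle this maps to a service name; used for schema exploration
              source_catalog: ORCLPDB1

        foreign_catalog_ingestion:              # hypothetical ingestion pipeline
          name: foreign-catalog-ingestion
          ingestion_definition:
            # Ingest directly from UC foreign catalogs; no connection or gateway needed
            ingest_from_uc_foreign_catalog: true
            objects:
              - table:
                  # Interpreted as the UC foreign catalog to ingest from
                  source_catalog: my_foreign_catalog
                  source_schema: sales
                  source_table: orders
                  destination_catalog: main
                  destination_schema: ingested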
@@ -3669,8 +3723,6 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefin
"source_configurations":
"description": |-
Top-level source configurations
"x-databricks-preview": |-
PRIVATE
"source_type":
"description": |-
The type of the foreign source.
@@ -3783,6 +3835,8 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionSourceType:
SHAREPOINT
- |-
DYNAMICS365
- |-
FOREIGN_CATALOG
github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger: {}
github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary:
"path":
@@ -3985,22 +4039,16 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PostgresCatalogConfig:
"slot_config":
"description": |-
Optional. The Postgres slot configuration to use for logical replication
"x-databricks-preview": |-
PRIVATE
github.com/databricks/databricks-sdk-go/service/pipelines.PostgresSlotConfig:
"_":
"description": |-
PostgresSlotConfig contains the configuration for a Postgres logical replication slot
"publication_name":
"description": |-
The name of the publication to use for the Postgres source
"x-databricks-preview": |-
PRIVATE
"slot_name":
"description": |-
The name of the logical replication slot to use for the Postgres source
"x-databricks-preview": |-
PRIVATE
github.com/databricks/databricks-sdk-go/service/pipelines.ReportSpec:
"destination_catalog":
"description": |-
@@ -4065,19 +4113,13 @@ github.com/databricks/databricks-sdk-go/service/pipelines.SourceCatalogConfig:
"postgres":
"description": |-
Postgres-specific catalog-level configuration parameters
"x-databricks-preview": |-
PRIVATE
"source_catalog":
"description": |-
Source catalog name
"x-databricks-preview": |-
PRIVATE
github.com/databricks/databricks-sdk-go/service/pipelines.SourceConfig:
"catalog":
"description": |-
Catalog-level source configuration parameters
"x-databricks-preview": |-
PRIVATE
github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec:
"destination_catalog":
"description": |-
@@ -4134,7 +4176,7 @@ github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig:
PRIVATE
"sequence_by":
"description": |-
The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.
The column names specifying the logical order of events in the source data. Spark Declarative Pipelines uses this sequencing to handle change events that arrive out of order.
"workday_report_parameters":
"description": |-
(Optional) Additional custom parameters for Workday Report
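Several Postgres-related fields in this file (`slot_config`, `publication_name`, `slot_name`, catalog-level `source_catalog`) lose their private-preview annotation. Assuming `source_configurations` is a list of `SourceConfig` entries, as the types above suggest, a catalog-level Postgres replication configuration might be sketched like this (connection, catalog, slot, and publication names are illustrative):

    ingestion_definition:
      connection_name: my_postgres_connection
      source_configurations:
        - catalog:
            source_catalog: appdb               # source catalog name
            postgres:
              slot_config:
                # Logical replication slot and publication used by the pipeline
                slot_name: databricks_slot
                publication_name: databricks_publication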
3 changes: 3 additions & 0 deletions bundle/internal/schema/annotations_openapi_overrides.yml
@@ -891,6 +891,9 @@ github.com/databricks/databricks-sdk-go/service/jobs.Task:
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings:
"model":
"description": |-
PLACEHOLDER
"table_update":
"description": |-
PLACEHOLDER