Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,6 @@
### CLI

### Bundles
- Fixed an issue where `allow_duplicate_names` field on the pipeline definition was ignored by the bundle ([#3274](https://github.com/databricks/cli/pull/3274))

### API Changes
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Bundle template for the allow-duplicate-names acceptance test.
# $UNIQUE_NAME is substituted by envsubst in the test script before deploy.
bundle:
  name: acc-bundle-deploy-pipeline-duplicate-names-$UNIQUE_NAME

resources:
  pipelines:
    pipeline_one:
      # Same name as the pipeline pre-created via pipeline.json.tmpl; the
      # deploy must still succeed because duplicates are explicitly allowed.
      name: test-pipeline-same-name-$UNIQUE_NAME
      # Field under test: without it, creating a second pipeline with an
      # existing name would fail.
      allow_duplicate_names: true
      libraries:
        - file:
            path: "./foo.py"
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Acceptance-test configuration: run against both the local test server and a
# real cloud workspace.
Local = true
Cloud = true

# Run the test once per value: both the terraform backend and the direct
# deployment backend must honor allow_duplicate_names.
[EnvMatrix]
DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/acc-bundle-deploy-pipeline-duplicate-names-[UNIQUE_NAME]/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> print_requests
{
"body": {
"libraries": [
{
"file": {
"path": "/some-script.py"
}
}
],
"name": "test-pipeline-same-name-[UNIQUE_NAME]"
},
"method": "POST",
"path": "/api/2.0/pipelines"
}
{
"body": {
"allow_duplicate_names": true,
"channel": "CURRENT",
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/acc-bundle-deploy-pipeline-duplicate-names-[UNIQUE_NAME]/default/state/metadata.json"
},
"edition": "ADVANCED",
"libraries": [
{
"file": {
"path": "/Workspace/Users/[USERNAME]/.bundle/acc-bundle-deploy-pipeline-duplicate-names-[UNIQUE_NAME]/default/files/foo.py"
}
}
],
"name": "test-pipeline-same-name-[UNIQUE_NAME]"
},
"method": "POST",
"path": "/api/2.0/pipelines"
}

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete pipeline pipeline_one

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/acc-bundle-deploy-pipeline-duplicate-names-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!

>>> [CLI] pipelines delete [UUID]
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
{
"name": "test-pipeline-same-name-$UNIQUE_NAME",
"libraries": [
{
"file": {
"path": "/some-script.py"
}
}
]
}
22 changes: 22 additions & 0 deletions acceptance/bundle/deploy/pipeline/allow-duplicate-names/script
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Acceptance test: deploying a bundle whose pipeline name collides with an
# existing pipeline must succeed when `allow_duplicate_names: true` is set.

# Render templates (substitutes $UNIQUE_NAME etc. from the environment) and
# create the local library file the bundle references.
envsubst < databricks.yml.tmpl > databricks.yml
envsubst < pipeline.json.tmpl > pipeline.json
touch foo.py

# On exit (success or failure): destroy the deployed bundle, delete the
# pre-created pipeline, and remove the recorded-requests log.
cleanup() {
trace $CLI bundle destroy --auto-approve
trace $CLI pipelines delete ${PIPELINE_ID}
rm out.requests.txt
}
trap cleanup EXIT

# Create a pre-existing pipeline:
PIPELINE_ID=$($CLI pipelines create --json @pipeline.json | jq -r .pipeline_id)
export PIPELINE_ID

# Deploy the bundle that has a pipeline with the same name:
trace $CLI bundle deploy

# Print the non-GET requests made against the pipelines API (recorded in
# out.requests.txt via RecordRequests); --sort-keys keeps the output stable
# for comparison with the checked-in transcript.
print_requests() {
jq --sort-keys 'select(.method != "GET" and (.path | contains("/pipelines")))' < out.requests.txt
}
trace print_requests
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Cloud-only test; capture API traffic so the script can assert on the
# requests sent to /api/2.0/pipelines.
Cloud = true
# Keep generated/scratch files out of the recorded output.
Ignore = ["foo.py","pipeline.json"]
# Write each request to out.requests.txt, which print_requests filters.
RecordRequests = true
2 changes: 1 addition & 1 deletion bundle/deploy/terraform/tfdyn/convert_pipeline.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ func convertPipelineResource(ctx context.Context, vin dyn.Value) (dyn.Value, err
return dyn.InvalidValue, err
}

vout, err = dyn.DropKeys(vout, []string{"allow_duplicate_names", "dry_run"})
vout, err = dyn.DropKeys(vout, []string{"dry_run"})
if err != nil {
return dyn.InvalidValue, err
}
Expand Down
1 change: 1 addition & 0 deletions bundle/deploy/terraform/tfdyn/convert_pipeline_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,7 @@ func TestConvertPipeline(t *testing.T) {
},
},
},
"allow_duplicate_names": true,
"cluster": []any{
map[string]any{
"label": "default",
Expand Down
1 change: 0 additions & 1 deletion bundle/internal/schema/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,6 @@ func removePipelineFields(typ reflect.Type, s jsonschema.Schema) jsonschema.Sche
// Even though DABs supports this field, TF provider does not. Thus, we
// should not expose it to the user.
delete(s.Properties, "dry_run")
delete(s.Properties, "allow_duplicate_names")

// These fields are only meant to be set by the DABs client (ie the CLI)
// and thus should not be exposed to the user. These are used to annotate
Expand Down
4 changes: 4 additions & 0 deletions bundle/schema/jsonschema.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,11 @@
class Pipeline(Resource):
""""""

allow_duplicate_names: VariableOrOptional[bool] = None
"""
If false, deployment will fail if name conflicts with that of another pipeline.
"""

budget_policy_id: VariableOrOptional[str] = None
"""
:meta private: [EXPERIMENTAL]
Expand Down Expand Up @@ -212,6 +217,11 @@ def as_dict(self) -> "PipelineDict":
class PipelineDict(TypedDict, total=False):
""""""

allow_duplicate_names: VariableOrOptional[bool]
"""
If false, deployment will fail if name conflicts with that of another pipeline.
"""

budget_policy_id: VariableOrOptional[str]
"""
:meta private: [EXPERIMENTAL]
Expand Down
Loading