Commit dec5727

Preserve allow_duplicate_names field in the pipeline resource (#3274)
## Changes

- Fixed pipeline resource conversion to preserve the `allow_duplicate_names` configuration field
- Updated the schema and codegen to reflect the `allow_duplicate_names` field

## Why

The `allow_duplicate_names` field was being dropped from pipeline resources during conversion, preventing users from configuring pipelines that allow duplicate names. This change ensures the field is preserved and passed through to the Terraform provider.

## Tests

- Added a new acceptance test
1 parent a338fdb commit dec5727

File tree

12 files changed: +122 additions, −2 deletions

NEXT_CHANGELOG.md

Lines changed: 1 addition & 0 deletions

@@ -9,5 +9,6 @@
 ### CLI

 ### Bundles
+- Fixed an issue where `allow_duplicate_names` field on the pipeline definition was ignored by the bundle ([#3274](https://github.com/databricks/cli/pull/3274))

 ### API Changes
Lines changed: 11 additions & 0 deletions

bundle:
  name: acc-bundle-deploy-pipeline-duplicate-names-$UNIQUE_NAME

resources:
  pipelines:
    pipeline_one:
      name: test-pipeline-same-name-$UNIQUE_NAME
      allow_duplicate_names: true
      libraries:
        - file:
            path: "./foo.py"
Lines changed: 5 additions & 0 deletions

Local = true
Cloud = true

[EnvMatrix]
DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"]
Lines changed: 54 additions & 0 deletions


>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/acc-bundle-deploy-pipeline-duplicate-names-[UNIQUE_NAME]/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> print_requests
{
  "body": {
    "libraries": [
      {
        "file": {
          "path": "/some-script.py"
        }
      }
    ],
    "name": "test-pipeline-same-name-[UNIQUE_NAME]"
  },
  "method": "POST",
  "path": "/api/2.0/pipelines"
}
{
  "body": {
    "allow_duplicate_names": true,
    "channel": "CURRENT",
    "deployment": {
      "kind": "BUNDLE",
      "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/acc-bundle-deploy-pipeline-duplicate-names-[UNIQUE_NAME]/default/state/metadata.json"
    },
    "edition": "ADVANCED",
    "libraries": [
      {
        "file": {
          "path": "/Workspace/Users/[USERNAME]/.bundle/acc-bundle-deploy-pipeline-duplicate-names-[UNIQUE_NAME]/default/files/foo.py"
        }
      }
    ],
    "name": "test-pipeline-same-name-[UNIQUE_NAME]"
  },
  "method": "POST",
  "path": "/api/2.0/pipelines"
}

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
  delete pipeline pipeline_one

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/acc-bundle-deploy-pipeline-duplicate-names-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!

>>> [CLI] pipelines delete [UUID]
Lines changed: 10 additions & 0 deletions

{
  "name": "test-pipeline-same-name-$UNIQUE_NAME",
  "libraries": [
    {
      "file": {
        "path": "/some-script.py"
      }
    }
  ]
}
Lines changed: 22 additions & 0 deletions

envsubst < databricks.yml.tmpl > databricks.yml
envsubst < pipeline.json.tmpl > pipeline.json
touch foo.py

cleanup() {
  trace $CLI bundle destroy --auto-approve
  trace $CLI pipelines delete ${PIPELINE_ID}
  rm out.requests.txt
}
trap cleanup EXIT

# Create a pre-existing pipeline:
PIPELINE_ID=$($CLI pipelines create --json @pipeline.json | jq -r .pipeline_id)
export PIPELINE_ID

# Deploy the bundle that has a pipeline with the same name:
trace $CLI bundle deploy

print_requests() {
  jq --sort-keys 'select(.method != "GET" and (.path | contains("/pipelines")))' < out.requests.txt
}
trace print_requests
Lines changed: 3 additions & 0 deletions

Cloud = true
Ignore = ["foo.py","pipeline.json"]
RecordRequests = true

bundle/deploy/terraform/tfdyn/convert_pipeline.go

Lines changed: 1 addition & 1 deletion

@@ -21,7 +21,7 @@ func convertPipelineResource(ctx context.Context, vin dyn.Value) (dyn.Value, err
         return dyn.InvalidValue, err
     }

-    vout, err = dyn.DropKeys(vout, []string{"allow_duplicate_names", "dry_run"})
+    vout, err = dyn.DropKeys(vout, []string{"dry_run"})
     if err != nil {
         return dyn.InvalidValue, err
     }
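For intuition, here is a minimal, self-contained sketch of what the one-line change above accomplishes. It uses a plain map[string]any instead of the CLI's dyn.Value, and dropKeys is a hypothetical stand-in for dyn.DropKeys, so treat it as an illustration rather than the actual implementation:

package main

import "fmt"

// dropKeys returns a copy of m without the given keys. It is a simplified,
// map-based stand-in for the key-dropping step in the pipeline conversion.
func dropKeys(m map[string]any, keys ...string) map[string]any {
	out := make(map[string]any, len(m))
	for k, v := range m {
		out[k] = v
	}
	for _, k := range keys {
		delete(out, k)
	}
	return out
}

func main() {
	pipeline := map[string]any{
		"name":                  "test-pipeline",
		"allow_duplicate_names": true,
		"dry_run":               false,
	}

	// Before this commit the conversion stripped both fields, so the
	// Terraform provider never saw allow_duplicate_names.
	fmt.Println(dropKeys(pipeline, "allow_duplicate_names", "dry_run"))
	// map[name:test-pipeline]

	// After this commit only dry_run is stripped; allow_duplicate_names
	// is passed through to the provider.
	fmt.Println(dropKeys(pipeline, "dry_run"))
	// map[allow_duplicate_names:true name:test-pipeline]
}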

bundle/deploy/terraform/tfdyn/convert_pipeline_test.go

Lines changed: 1 addition & 0 deletions

@@ -116,6 +116,7 @@ func TestConvertPipeline(t *testing.T) {
             },
         },
     },
+    "allow_duplicate_names": true,
     "cluster": []any{
         map[string]any{
             "label": "default",

bundle/internal/schema/main.go

Lines changed: 0 additions & 1 deletion

@@ -115,7 +115,6 @@ func removePipelineFields(typ reflect.Type, s jsonschema.Schema) jsonschema.Sche
     // Even though DABs supports this field, TF provider does not. Thus, we
     // should not expose it to the user.
     delete(s.Properties, "dry_run")
-    delete(s.Properties, "allow_duplicate_names")

     // These fields are only meant to be set by the DABs client (ie the CLI)
     // and thus should not be exposed to the user. These are used to annotate
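To see the user-facing effect of removing that delete call, here is a small sketch. The schema struct below is a hypothetical, stripped-down stand-in for the CLI's jsonschema.Schema (only a Properties map is modeled), so the shapes are illustrative only:

package main

import (
	"encoding/json"
	"fmt"
)

// schema is a hypothetical stand-in for the CLI's jsonschema.Schema type;
// only the Properties map matters for this illustration.
type schema struct {
	Properties map[string]any `json:"properties"`
}

func main() {
	s := schema{Properties: map[string]any{
		"name":                  map[string]any{"type": "string"},
		"dry_run":               map[string]any{"type": "boolean"},
		"allow_duplicate_names": map[string]any{"type": "boolean"},
	}}

	// dry_run is still pruned because the Terraform provider does not
	// support it. allow_duplicate_names is no longer deleted, so it stays
	// in the generated bundle schema and is surfaced to users.
	delete(s.Properties, "dry_run")

	// Prints a schema that still contains allow_duplicate_names but not dry_run.
	out, _ := json.MarshalIndent(s, "", "  ")
	fmt.Println(string(out))
}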
