Commit 00940f5
Convert integration test TestBundlePipelineDeleteWithoutAutoApprove to an acceptance test (#2891)
## Why

One change in a series converting integration tests into acceptance tests. This will make it easier to test various backing solutions for bundle deployment.
Parent: 1b319df

File tree: 9 files changed (+129, -48 lines)
bar.py (new file)
Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+# Databricks notebook source
+print("hello bar")
databricks.yml.tmpl (new file)
Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
+bundle:
+  name: test-pipeline-deploy-then-remove-$UNIQUE_NAME
+
+workspace:
+  root_path: ~/.bundle/$UNIQUE_NAME
+
+include:
+  - "./*.yml"
foo.py (new file)
Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+print("hello foo")
output.txt (new file)
Lines changed: 60 additions & 0 deletions
@@ -0,0 +1,60 @@
+
+>>> [CLI] bundle deploy
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files...
+Deploying resources...
+Updating deployment state...
+Deployment complete!
+
+=== Assert the pipeline is created
+>>> [CLI] pipelines get [UUID]
+{
+  "spec": {
+    "channel": "CURRENT",
+    "deployment": {
+      "kind": "BUNDLE",
+      "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/state/metadata.json"
+    },
+    "edition": "ADVANCED",
+    "id": "[UUID]",
+    "libraries": [
+      {
+        "file": {
+          "path": "/Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files/foo.py"
+        }
+      }
+    ],
+    "name": "test-bundle-pipeline-[UNIQUE_NAME]",
+    "storage": "dbfs:/pipelines/[UUID]"
+  }
+}
+
+=== Assert the job is created{
+  "name": "test-bundle-job-[UNIQUE_NAME]"
+}
+
+=== Remove resources from configuration.
+>>> rm resources.yml
+
+=== Try to redeploy the bundle - should fail without --auto-approve
+>>> errcode [CLI] bundle deploy
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files...
+
+This action will result in the deletion or recreation of the following DLT Pipelines along with the
+Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
+restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline
+properties such as the 'catalog' or 'storage' are changed:
+  delete pipeline bar
+Error: the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed
+
+
+Exit code: 1
+
+>>> [CLI] bundle destroy --auto-approve
+The following resources will be deleted:
+  delete job foo
+  delete pipeline bar
+
+All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]
+
+Deleting files...
+Destroy complete!
resources.yml.tmpl (new file)
Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
+resources:
+  jobs:
+    foo:
+      name: test-bundle-job-$UNIQUE_NAME
+      tasks:
+        - task_key: my_notebook_task
+          new_cluster:
+            num_workers: 1
+            spark_version: $DEFAULT_SPARK_VERSION
+            node_type_id: $NODE_TYPE_ID
+          notebook_task:
+            notebook_path: "./bar.py"
+  pipelines:
+    bar:
+      name: test-bundle-pipeline-$UNIQUE_NAME
+      libraries:
+        - file:
+            path: "./foo.py"
script (new file)
Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
+envsubst < databricks.yml.tmpl > databricks.yml
+envsubst < resources.yml.tmpl > resources.yml
+
+cleanup() {
+  trace $CLI bundle destroy --auto-approve
+}
+trap cleanup EXIT
+
+trace $CLI bundle deploy
+
+title "Assert the pipeline is created"
+PIPELINE_ID=$($CLI bundle summary -o json | jq -r '.resources.pipelines.bar.id')
+trace $CLI pipelines get "${PIPELINE_ID}" | jq "{spec}"
+
+title "Assert the job is created"
+JOB_ID=$($CLI bundle summary -o json | jq -r '.resources.jobs.foo.id')
+$CLI jobs get "${JOB_ID}" | jq '{name: .settings.name}'
+
+title "Remove resources from configuration."
+trace rm resources.yml
+
+title "Try to redeploy the bundle - should fail without --auto-approve"
+trace errcode $CLI bundle deploy
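The two `envsubst` calls at the top of the script render the `.tmpl` files into the actual bundle configuration by substituting environment variables such as `$UNIQUE_NAME` and `$NODE_TYPE_ID` exported by the test harness. A minimal Go sketch of the same substitution using `os.Expand` (illustrative only, not the framework's implementation):

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	// Stand-in for the unique ID the acceptance framework exports.
	os.Setenv("UNIQUE_NAME", "abc123")

	// One line from databricks.yml.tmpl; envsubst replaces $VAR
	// references with environment values, as os.Expand does here.
	tmpl := "name: test-pipeline-deploy-then-remove-$UNIQUE_NAME"
	fmt.Println(os.Expand(tmpl, os.Getenv))
	// Output: name: test-pipeline-deploy-then-remove-abc123
}
```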
test.toml (new file)
Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+Local = true
+Cloud = true
+
+Ignore = [
+  "databricks.yml"
+]
+
+[[Repls]]
+Old = "[0-9]{3,}"
+New = "[NUMID]"
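The `[[Repls]]` entry keeps the golden file stable: any run of three or more digits in the recorded output (for example a numeric job ID) is rewritten to the placeholder `[NUMID]` before comparison against output.txt. A small Go sketch of that substitution (an illustration; the framework's actual implementation may differ):

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as Old = "[0-9]{3,}" in test.toml above.
	re := regexp.MustCompile(`[0-9]{3,}`)

	// A numeric ID differs on every test run; replacing it makes the
	// output deterministic and safe to commit as output.txt.
	fmt.Println(re.ReplaceAllString(`{"job_id": 987654321}`, "[NUMID]"))
	// Output: {"job_id": [NUMID]}
}
```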

integration/bundle/deploy_test.go
Lines changed: 0 additions & 48 deletions
@@ -2,8 +2,6 @@ package bundle_test
 
 import (
 	"fmt"
-	"os"
-	"path/filepath"
 	"strings"
 	"testing"
 
@@ -18,52 +16,6 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
-func TestBundlePipelineDeleteWithoutAutoApprove(t *testing.T) {
-	ctx, wt := acc.WorkspaceTest(t)
-	w := wt.W
-
-	nodeTypeId := testutil.GetCloud(t).NodeTypeID()
-	uniqueId := uuid.New().String()
-	bundleRoot := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{
-		"unique_id":     uniqueId,
-		"node_type_id":  nodeTypeId,
-		"spark_version": defaultSparkVersion,
-	})
-
-	// deploy pipeline
-	deployBundle(t, ctx, bundleRoot)
-
-	// assert pipeline is created
-	pipelineName := "test-bundle-pipeline-" + uniqueId
-	pipeline, err := w.Pipelines.GetByName(ctx, pipelineName)
-	require.NoError(t, err)
-	assert.Equal(t, pipeline.Name, pipelineName)
-
-	// assert job is created
-	jobName := "test-bundle-job-" + uniqueId
-	job, err := w.Jobs.GetBySettingsName(ctx, jobName)
-	require.NoError(t, err)
-	assert.Equal(t, job.Settings.Name, jobName)
-
-	// delete resources.yml
-	err = os.Remove(filepath.Join(bundleRoot, "resources.yml"))
-	require.NoError(t, err)
-
-	// Redeploy the bundle. Expect it to fail because deleting the pipeline requires --auto-approve.
-	ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot)
-	ctx = env.Set(ctx, "TERM", "dumb")
-	c := testcli.NewRunner(t, ctx, "bundle", "deploy", "--force-lock")
-	stdout, stderr, err := c.Run()
-
-	assert.EqualError(t, err, root.ErrAlreadyPrinted.Error())
-	assert.Contains(t, stderr.String(), `This action will result in the deletion or recreation of the following DLT Pipelines along with the
-Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
-restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline
-properties such as the 'catalog' or 'storage' are changed:
-  delete pipeline bar`)
-	assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
-}
-
 func TestBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
 	ctx, wt := acc.UcWorkspaceTest(t)
 	w := wt.W
libs/testserver/fake_workspace.go
Lines changed: 7 additions & 0 deletions
@@ -219,6 +219,13 @@ func (s *FakeWorkspace) PipelinesCreate(r pipelines.PipelineSpec) Response {
 	pipelineId := uuid.New().String()
 
 	r.Id = pipelineId
+
+	// If the pipeline definition does not specify a catalog, it switches to Hive metastore mode,
+	// and if the storage location is not specified, the API automatically generates one
+	// (ref: https://docs.databricks.com/gcp/en/dlt/hive-metastore#specify-a-storage-location)
+	if r.Storage == "" && r.Catalog == "" {
+		r.Storage = "dbfs:/pipelines/" + pipelineId
+	}
 	s.Pipelines[pipelineId] = r
 
 	return Response{
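The defaulting rule added above mirrors the real API: a pipeline without a `catalog` runs against the Hive metastore, and if no `storage` location is given, one is generated under `dbfs:/pipelines/<pipeline-id>`, which is why output.txt shows `"storage": "dbfs:/pipelines/[UUID]"`. Restated as a hypothetical standalone helper (not part of this commit) with example inputs:

```go
package main

import "fmt"

// defaultPipelineStorage is a hypothetical restatement of the rule the
// fake workspace now applies when a pipeline is created.
func defaultPipelineStorage(catalog, storage, pipelineID string) string {
	if storage == "" && catalog == "" {
		// Hive metastore mode with no explicit location: derive one
		// from the pipeline ID.
		return "dbfs:/pipelines/" + pipelineID
	}
	return storage
}

func main() {
	fmt.Println(defaultPipelineStorage("", "", "1234-abcd"))     // dbfs:/pipelines/1234-abcd
	fmt.Println(defaultPipelineStorage("main", "", "1234-abcd")) // "" (Unity Catalog pipeline)
}
```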
