Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -141,5 +141,8 @@ generate:
@echo "Generating CLI code..."
$(GENKIT_BINARY) update-sdk

# Create a scratch testing environment to run tests on DBR.
dbr-scratch:
	deco env run -i -n azure-prod-ucws -- go test -test.v -run TestSetupDbrRunner github.com/databricks/cli/acceptance -count 1

# NOTE: the PHONY entry must match the target name exactly (dbr-scratch, not dbr_scratch),
# otherwise make will not treat the target as phony.
.PHONY: lint lintfull tidy lintcheck fmt fmtfull test cover showcover build snapshot snapshot-release schema integration integration-short acc-cover acc-showcover docs ws links checks test-update test-update-aws test-update-all generate-validation dbr-scratch
9 changes: 8 additions & 1 deletion acceptance/acceptance_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -466,6 +466,13 @@ func getSkipReason(config *internal.TestConfig, configPath string) string {
return fmt.Sprintf("Disabled via RequiresCluster setting in %s (TEST_DEFAULT_CLUSTER_ID is empty)", configPath)
}

if isTruePtr(config.RequiresWorkspaceFilesystem) {
isDBR := os.Getenv("DATABRICKS_RUNTIME_VERSION") != ""
if !isDBR || !WorkspaceTmpDir {
return fmt.Sprintf("Disabled via RequiresWorkspaceFilesystem setting in %s (DATABRICKS_RUNTIME_VERSION=%s, WorkspaceTmpDir=%v)", configPath, os.Getenv("DATABRICKS_RUNTIME_VERSION"), WorkspaceTmpDir)
}
}

} else {
// Local run
if !isTruePtr(config.Local) {
Expand Down Expand Up @@ -516,7 +523,7 @@ func runTest(t *testing.T,
// If the test is being run on DBR, auth is already configured
// by the dbr_runner notebook by reading a token from the notebook context and
// setting DATABRICKS_TOKEN and DATABRICKS_HOST environment variables.
_, _, tmpDir = workspaceTmpDir(t.Context(), t)
Copy link
Contributor Author

@shreyas-goenka shreyas-goenka Sep 7, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This was necessary to ensure the test directory gets cleaned up. Otherwise the context seems to be cancelled before t.Cleanup is executed.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The doc for testing.T.Context says as much: https://pkg.go.dev/testing#T.Context

Unless there is another context to use, it seems better to use context.Background() in the cleanup function directly, or other call sites will be prone to the same error.

tmpDir = workspaceTmpDir(context.Background(), t)

// Run DBR tests on the workspace file system to mimic usage from
// DABs in the workspace.
Expand Down
134 changes: 126 additions & 8 deletions acceptance/dbr_test.go
Original file line number Diff line number Diff line change
@@ -1,20 +1,29 @@
package acceptance_test

import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"os"
"path"
"path/filepath"
"strconv"
"testing"
"time"

"github.com/databricks/cli/internal/testarchive"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

func workspaceTmpDir(ctx context.Context, t *testing.T) (*databricks.WorkspaceClient, filer.Filer, string) {
func workspaceTmpDir(ctx context.Context, t *testing.T) string {
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)

Expand All @@ -29,19 +38,128 @@
uuid.New().String(),
)

// Create the directory using os.MkdirAll (via FUSE)
err = os.MkdirAll(tmpDir, 0o755)
require.NoError(t, err)

t.Cleanup(func() {
err := w.Workspace.Delete(ctx, workspace.Delete{
Path: tmpDir,
Recursive: true,
})
// Remove the directory using os.RemoveAll (via FUSE)
err := os.RemoveAll(tmpDir)
assert.NoError(t, err)
})

err = w.Workspace.MkdirsByPath(ctx, tmpDir)
return tmpDir
}

// workspaceStableDir creates (recreating if present) a stable scratch
// directory under the current user's workspace home to run and iterate on
// DBR tests. It returns the workspace client, a filer rooted at the
// directory, and the directory's workspace path.
func workspaceStableDir(ctx context.Context, t *testing.T) (w *databricks.WorkspaceClient, f filer.Filer, path string) {
	w, err := databricks.NewWorkspaceClient()
	require.NoError(t, err)

	currentUser, err := w.CurrentUser.Me(ctx)
	require.NoError(t, err)

	path = fmt.Sprintf("/Workspace/Users/%s/dbr_scratch", currentUser.UserName)

	// Delete the directory if it exists. A RESOURCE_DOES_NOT_EXIST error is
	// expected on first run and is not fatal; any other error is.
	err = w.Workspace.Delete(ctx, workspace.Delete{
		Path:      path,
		Recursive: true,
	})
	var aerr *apierr.APIError
	if err != nil && (!errors.As(err, &aerr) || aerr.ErrorCode != "RESOURCE_DOES_NOT_EXIST") {
		t.Fatalf("Failed to delete directory %s: %v", path, err)
	}

	err = w.Workspace.MkdirsByPath(ctx, path)
	require.NoError(t, err)

	// Create a filer client rooted at the scratch directory.
	f, err = filer.NewWorkspaceFilesClient(w, path)
	require.NoError(t, err)

	return w, f, path
}

func buildAndUploadArchive(ctx context.Context, t *testing.T, f filer.Filer) {
archiveDir := t.TempDir()
binDir := t.TempDir()
archiveName := "archive.tar.gz"

// Build the CLI archives and upload to the workspace.
testarchive.CreateArchive(archiveDir, binDir, "..")

Check failure on line 90 in acceptance/dbr_test.go

View workflow job for this annotation

GitHub Actions / lint

Error return value of `testarchive.CreateArchive` is not checked (errcheck)

archiveReader, err := os.Open(filepath.Join(archiveDir, archiveName))
require.NoError(t, err)

err = f.Write(ctx, archiveName, archiveReader)
require.NoError(t, err)

err = archiveReader.Close()
require.NoError(t, err)
}

// uploadScratchRunner uploads the scratch runner notebook into the workspace
// directory dir and returns a URL for opening it in the workspace editor.
func uploadScratchRunner(ctx context.Context, t *testing.T, f filer.Filer, w *databricks.WorkspaceClient, dir string) string {
	runnerReader, err := os.Open("scratch_dbr_runner.ipynb")
	require.NoError(t, err)
	// Close via defer so the handle is released even if an assertion below
	// fails (the original leaked the handle in that case).
	defer func() {
		require.NoError(t, runnerReader.Close())
	}()

	err = f.Write(ctx, "scratch_dbr_runner.ipynb", runnerReader)
	require.NoError(t, err)

	// Status is looked up without the .ipynb extension — presumably the
	// workspace imports the file as a notebook object named without the
	// extension; confirm against workspace import semantics.
	status, err := w.Workspace.GetStatusByPath(ctx, path.Join(dir, "scratch_dbr_runner"))
	require.NoError(t, err)

	url := w.Config.Host + "/editor/notebooks/" + strconv.FormatInt(status.ObjectId, 10)

	return url
}

// uploadParams captures selected environment variables from the local
// environment and uploads them as an indented JSON file (params.json) via f.
func uploadParams(ctx context.Context, t *testing.T, f filer.Filer) {
	// Environment variables forwarded to the DBR test runner.
	env := map[string]string{}
	for _, name := range []string{
		"CLOUD_ENV",
		"TEST_DEFAULT_CLUSTER_ID",
		"TEST_DEFAULT_WAREHOUSE_ID",
		"TEST_INSTANCE_POOL_ID",
		"TEST_METASTORE_ID",
	} {
		env[name] = os.Getenv(name)
	}

	payload, err := json.MarshalIndent(env, "", " ")
	require.NoError(t, err)

	require.NoError(t, f.Write(ctx, "params.json", bytes.NewReader(payload)))
}

// Running this test will set up a DBR test runner in the configured workspace.
// It does not run the tests itself: you'll need to run them by actually
// running the uploaded notebook on the workspace.
func TestSetupDbrRunner(t *testing.T) {
	ctx := t.Context()
	w, f, dir := workspaceStableDir(ctx, t)

	t.Logf("Building and uploading archive...")
	buildAndUploadArchive(ctx, t, f)

	t.Logf("Uploading params...")
	uploadParams(ctx, t, f)

	t.Logf("Uploading runner...")
	url := uploadScratchRunner(ctx, t, f, w, dir)

	t.Logf("Created DBR testing notebook at: %s", url)
}

func TestArchive(t *testing.T) {
archiveDir := t.TempDir()
binDir := t.TempDir()
testarchive.CreateArchive(archiveDir, binDir, "..")

Check failure on line 162 in acceptance/dbr_test.go

View workflow job for this annotation

GitHub Actions / lint

Error return value of `testarchive.CreateArchive` is not checked (errcheck)

assert.FileExists(t, filepath.Join(archiveDir, "archive.tar.gz"))
}
5 changes: 5 additions & 0 deletions acceptance/internal/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,11 @@ type TestConfig struct {
// If true and Cloud=true, run this test only if a default warehouse is available in the cloud environment
RequiresWarehouse *bool

// If true run this test only if running on DBR with workspace filesystem
// Note that this implicitly implies Cloud=true since running on the workspace
// file system is only supported for integration tests.
RequiresWorkspaceFilesystem *bool

// If set, current user will be set to a service principal-like UUID instead of email (default is false)
IsServicePrincipal *bool

Expand Down
38 changes: 20 additions & 18 deletions acceptance/internal/materialized_config.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,30 +9,32 @@ import (
const MaterializedConfigFile = "out.test.toml"

// MaterializedConfig is the subset of test configuration fields that
// determine where and how a test is executed, serialized to TOML
// (see MaterializedConfigFile).
type MaterializedConfig struct {
	GOOS                        map[string]bool     `toml:"GOOS,omitempty"`
	CloudEnvs                   map[string]bool     `toml:"CloudEnvs,omitempty"`
	Local                       *bool               `toml:"Local,omitempty"`
	Cloud                       *bool               `toml:"Cloud,omitempty"`
	CloudSlow                   *bool               `toml:"CloudSlow,omitempty"`
	RequiresUnityCatalog        *bool               `toml:"RequiresUnityCatalog,omitempty"`
	RequiresCluster             *bool               `toml:"RequiresCluster,omitempty"`
	RequiresWarehouse           *bool               `toml:"RequiresWarehouse,omitempty"`
	RequiresWorkspaceFilesystem *bool               `toml:"RequiresWorkspaceFilesystem,omitempty"`
	EnvMatrix                   map[string][]string `toml:"EnvMatrix,omitempty"`
}

// GenerateMaterializedConfig creates a TOML representation of the configuration fields
// that determine where and how a test is executed
func GenerateMaterializedConfig(config TestConfig) (string, error) {
materialized := MaterializedConfig{
GOOS: config.GOOS,
CloudEnvs: config.CloudEnvs,
Local: config.Local,
Cloud: config.Cloud,
CloudSlow: config.CloudSlow,
RequiresUnityCatalog: config.RequiresUnityCatalog,
RequiresCluster: config.RequiresCluster,
RequiresWarehouse: config.RequiresWarehouse,
EnvMatrix: config.EnvMatrix,
GOOS: config.GOOS,
CloudEnvs: config.CloudEnvs,
Local: config.Local,
Cloud: config.Cloud,
CloudSlow: config.CloudSlow,
RequiresUnityCatalog: config.RequiresUnityCatalog,
RequiresCluster: config.RequiresCluster,
RequiresWarehouse: config.RequiresWarehouse,
RequiresWorkspaceFilesystem: config.RequiresWorkspaceFilesystem,
EnvMatrix: config.EnvMatrix,
}

var buf bytes.Buffer
Expand Down
Loading
Loading