From 42aef75a9ade8fca5e7e1b6007bd984f7fdaba2b Mon Sep 17 00:00:00 2001
From: Tejas Kochar
Date: Mon, 22 Dec 2025 04:19:34 +0000
Subject: [PATCH 1/2] Add support for parsing scopes from config file

---
 .../sdk/core/ConfigAttributeAccessor.java     | 22 ++++++++++
 .../databricks/sdk/core/DatabricksConfig.java |  8 ++++
 .../sdk/core/DatabricksConfigTest.java        | 40 +++++++++++++++++++
 .../test/resources/testdata/.databrickscfg    | 13 +++++-
 4 files changed, 82 insertions(+), 1 deletion(-)

diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java
index bfda43d73..5553dbac7 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java
@@ -2,9 +2,13 @@
 
 import com.databricks.sdk.support.InternalApi;
 import java.lang.reflect.Field;
+import java.lang.reflect.ParameterizedType;
 import java.time.Duration;
+import java.util.Arrays;
+import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.stream.Collectors;
 
 @InternalApi
 class ConfigAttributeAccessor {
@@ -63,6 +67,21 @@ public void setValueOnConfig(DatabricksConfig cfg, String value) throws IllegalA
       field.set(cfg, seconds > 0 ? Duration.ofSeconds(seconds) : null);
     } else if (field.getType() == ProxyConfig.ProxyAuthType.class) {
       field.set(cfg, ProxyConfig.ProxyAuthType.valueOf(value));
+    } else if (List.class.isAssignableFrom(field.getType())) {
+      // Handle List fields (e.g., scopes).
+      // Parse comma- and/or whitespace-separated values from the environment variable or config file.
+      if (field.getGenericType() instanceof ParameterizedType) {
+        ParameterizedType paramType = (ParameterizedType) field.getGenericType();
+        if (paramType.getActualTypeArguments().length > 0
+            && paramType.getActualTypeArguments()[0] == String.class) {
+          // Split by commas and/or whitespace and filter out empty strings.
+          List<String> list =
+              Arrays.stream(value.trim().split("[,\\s]+"))
+                  .filter(s -> !s.isEmpty())
+                  .collect(Collectors.toList());
+          field.set(cfg, list);
+        }
+      }
     }
     field.setAccessible(false);
   }
@@ -91,6 +110,9 @@ public String toString() {
   }
 
   public String getAsString(Object value) {
+    if (value instanceof List) {
+      return String.join(" ", (List<String>) value);
+    }
     return value.toString();
   }
 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
index fb763aad2..2ae780e10 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
@@ -204,6 +204,7 @@ private synchronized DatabricksConfig innerResolve() {
     try {
       ConfigLoader.resolve(this);
       ConfigLoader.validate(this);
+      sortScopes();
       ConfigLoader.fixHostIfNeeded(this);
       initHttp();
       return this;
@@ -212,6 +213,13 @@
     }
   }
 
+  /** Sort scopes in-place for better de-duplication in the refresh token cache. */
+  private void sortScopes() {
+    if (scopes != null && !scopes.isEmpty()) {
+      java.util.Collections.sort(scopes);
+    }
+  }
+
   private void initHttp() {
     if (httpClient != null) {
       return;
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java
index 5600f5e51..f93daa628 100644
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java
@@ -14,10 +14,15 @@
 import java.io.IOException;
 import java.time.Duration;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Stream;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
 public class DatabricksConfigTest {
   @Test
@@ -322,4 +327,39 @@ public void testDisableOauthRefreshTokenEnvironmentVariable() {
 
     assertEquals(true, config.getDisableOauthRefreshToken());
   }
+
+  // Config File Scope Parsing Tests
+
+  private static Stream<Arguments> provideConfigFileScopesTestCases() {
+    return Stream.of(
+        Arguments.of("Empty scopes defaults to all-apis", "scope-empty", Arrays.asList("all-apis")),
+        Arguments.of("Single scope", "scope-single", Arrays.asList("clusters:read")),
+        Arguments.of(
+            "Multiple scopes sorted alphabetically",
+            "scope-multiple",
+            Arrays.asList(
+                "clusters",
+                "files:read",
+                "iam:read",
+                "jobs",
+                "mlflow",
+                "model-serving:read",
+                "pipelines")));
+  }
+
+  @ParameterizedTest(name = "{0}")
+  @MethodSource("provideConfigFileScopesTestCases")
+  public void testConfigFileScopes(String testName, String profile, List<String> expectedScopes) {
+    Map<String, String> env = new HashMap<>();
+    env.put("HOME", "src/test/resources/testdata");
+
+    DatabricksConfig config = new DatabricksConfig().setProfile(profile);
+    config.resolve(new Environment(env, new ArrayList<>(), System.getProperty("os.name")));
+
+    List<String> scopes = config.getScopes();
+    assertEquals(expectedScopes.size(), scopes.size());
+    for (int i = 0; i < expectedScopes.size(); i++) {
+      assertEquals(expectedScopes.get(i), scopes.get(i));
+    }
+  }
 }
diff --git a/databricks-sdk-java/src/test/resources/testdata/.databrickscfg b/databricks-sdk-java/src/test/resources/testdata/.databrickscfg
index 2759b6c1b..3b4fa447d 100644
--- a/databricks-sdk-java/src/test/resources/testdata/.databrickscfg
+++ b/databricks-sdk-java/src/test/resources/testdata/.databrickscfg
@@ -38,4 +38,15 @@ google_credentials = paw48590aw8e09t8apu
 
 [pat.with.dot]
 host = https://dbc-XXXXXXXX-YYYY.cloud.databricks.com/
-token = PT0+IC9kZXYvdXJhbmRvbSA8PT0KYFZ
\ No newline at end of file
+token = PT0+IC9kZXYvdXJhbmRvbSA8PT0KYFZ
+
+[scope-empty]
+host = https://example.cloud.databricks.com
+
+[scope-single]
+host = https://example.cloud.databricks.com
+scopes = clusters:read
+
+[scope-multiple]
+host = https://example.cloud.databricks.com
+scopes = clusters, jobs, pipelines, iam:read, files:read, mlflow, model-serving:read

From 30e93a3c15c1d3551d3c7b2987a6c5bffa2e2c18 Mon Sep 17 00:00:00 2001
From: Tejas Kochar
Date: Mon, 22 Dec 2025 05:15:06 +0000
Subject: [PATCH 2/2] No environment variable support for scopes

---
 .../src/main/java/com/databricks/sdk/core/DatabricksConfig.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
index 2ae780e10..1fcf4b3cf 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
@@ -36,7 +36,7 @@ public class DatabricksConfig {
   @ConfigAttribute(env = "DATABRICKS_CLIENT_SECRET", auth = "oauth", sensitive = true)
   private String clientSecret;
 
-  @ConfigAttribute(env = "DATABRICKS_SCOPES", auth = "oauth")
+  @ConfigAttribute(auth = "oauth")
   private List<String> scopes;
 
   @ConfigAttribute(env = "DATABRICKS_REDIRECT_URL", auth = "oauth")
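Reviewer note: a minimal end-to-end sketch of the behavior these two patches add. The profile name, host, and scope values below are hypothetical, and the sketch assumes the no-argument resolve() overload that reads ~/.databrickscfg from the user's home directory; the tests above instead pass an explicit Environment with HOME pointed at the fixture directory.

    [my-profile]
    host   = https://example.cloud.databricks.com
    scopes = clusters, jobs, iam:read

    import com.databricks.sdk.core.DatabricksConfig;
    import java.util.List;

    public class ScopesFromConfigFileExample {
      public static void main(String[] args) {
        // resolve() loads the profile from the config file, splits the
        // comma/whitespace-separated "scopes" value into a List<String>,
        // and sorts it in place via sortScopes() from patch 1.
        DatabricksConfig config = new DatabricksConfig().setProfile("my-profile").resolve();

        // Prints [clusters, iam:read, jobs] for the hypothetical profile above;
        // per the scope-empty test case, a profile without a "scopes" key
        // resolves to the all-apis default.
        List<String> scopes = config.getScopes();
        System.out.println(scopes);
      }
    }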