Commit 42aef75

Add support for parsing scopes from config file
1 parent 724548f commit 42aef75

4 files changed, +82 -1 lines changed

databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java

Lines changed: 22 additions & 0 deletions
@@ -2,9 +2,13 @@
 
 import com.databricks.sdk.support.InternalApi;
 import java.lang.reflect.Field;
+import java.lang.reflect.ParameterizedType;
 import java.time.Duration;
+import java.util.Arrays;
+import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.stream.Collectors;
 
 @InternalApi
 class ConfigAttributeAccessor {
@@ -63,6 +67,21 @@ public void setValueOnConfig(DatabricksConfig cfg, String value) throws IllegalA
       field.set(cfg, seconds > 0 ? Duration.ofSeconds(seconds) : null);
     } else if (field.getType() == ProxyConfig.ProxyAuthType.class) {
       field.set(cfg, ProxyConfig.ProxyAuthType.valueOf(value));
+    } else if (List.class.isAssignableFrom(field.getType())) {
+      // Handle List<String> fields (e.g., scopes)
+      // Parse comma and/or whitespace separated values from environment variable or config file
+      if (field.getGenericType() instanceof ParameterizedType) {
+        ParameterizedType paramType = (ParameterizedType) field.getGenericType();
+        if (paramType.getActualTypeArguments().length > 0
+            && paramType.getActualTypeArguments()[0] == String.class) {
+          // Split by commas and/or whitespace and filter out empty strings
+          List<String> list =
+              Arrays.stream(value.trim().split("[,\\s]+"))
+                  .filter(s -> !s.isEmpty())
+                  .collect(Collectors.toList());
+          field.set(cfg, list);
+        }
+      }
     }
     field.setAccessible(false);
   }
@@ -91,6 +110,9 @@ public String toString() {
   }
 
   public String getAsString(Object value) {
+    if (value instanceof List) {
+      return String.join(" ", (List<String>) value);
+    }
     return value.toString();
   }
 
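Note on the parsing above: the accessor splits a raw scopes value on commas and/or whitespace and drops empty tokens, so config-file and environment-variable forms behave the same. A minimal standalone sketch of that behavior using only plain JDK calls (the ScopeSplitSketch class name and the sample input are illustrative, not part of the SDK):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class ScopeSplitSketch {
  public static void main(String[] args) {
    // Same pattern as the accessor: split on commas and/or whitespace, drop empty tokens.
    String rawScopes = "clusters, jobs  pipelines,,iam:read";
    List<String> scopes =
        Arrays.stream(rawScopes.trim().split("[,\\s]+"))
            .filter(s -> !s.isEmpty())
            .collect(Collectors.toList());
    System.out.println(scopes); // prints [clusters, jobs, pipelines, iam:read]
  }
}

The getAsString change is the inverse direction: a List value is re-serialized by joining its elements with single spaces.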
databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java

Lines changed: 8 additions & 0 deletions
@@ -204,6 +204,7 @@ private synchronized DatabricksConfig innerResolve() {
     try {
       ConfigLoader.resolve(this);
       ConfigLoader.validate(this);
+      sortScopes();
       ConfigLoader.fixHostIfNeeded(this);
       initHttp();
       return this;
@@ -212,6 +213,13 @@ private synchronized DatabricksConfig innerResolve() {
     }
   }
 
+  /** Sort scopes in-place for better de-duplication in the refresh token cache. */
+  private void sortScopes() {
+    if (scopes != null && !scopes.isEmpty()) {
+      java.util.Collections.sort(scopes);
+    }
+  }
+
   private void initHttp() {
     if (httpClient != null) {
       return;
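The point of sortScopes() is normalization: two configurations that request the same scopes in different orders resolve to an identical list, which lets a refresh-token cache keyed on the scope list reuse one entry. A small sketch of that effect (hypothetical values; Collections.sort is the only call taken from the change):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class ScopeSortSketch {
  public static void main(String[] args) {
    // Two hypothetical configs listing the same scopes in different orders.
    List<String> a = new ArrayList<>(Arrays.asList("jobs", "clusters", "iam:read"));
    List<String> b = new ArrayList<>(Arrays.asList("iam:read", "jobs", "clusters"));

    // After the in-place sort applied during resolve(), the lists are equal,
    // so a cache keyed on the scope list would de-duplicate them.
    Collections.sort(a);
    Collections.sort(b);
    System.out.println(a.equals(b)); // prints true
  }
}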

databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java

Lines changed: 40 additions & 0 deletions
@@ -14,10 +14,15 @@
 import java.io.IOException;
 import java.time.Duration;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Stream;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
 public class DatabricksConfigTest {
   @Test
@@ -322,4 +327,39 @@ public void testDisableOauthRefreshTokenEnvironmentVariable() {
 
     assertEquals(true, config.getDisableOauthRefreshToken());
   }
+
+  // Config File Scope Parsing Tests
+
+  private static Stream<Arguments> provideConfigFileScopesTestCases() {
+    return Stream.of(
+        Arguments.of("Empty scopes defaults to all-apis", "scope-empty", Arrays.asList("all-apis")),
+        Arguments.of("Single scope", "scope-single", Arrays.asList("clusters:read")),
+        Arguments.of(
+            "Multiple scopes sorted alphabetically",
+            "scope-multiple",
+            Arrays.asList(
+                "clusters",
+                "files:read",
+                "iam:read",
+                "jobs",
+                "mlflow",
+                "model-serving:read",
+                "pipelines")));
+  }
+
+  @ParameterizedTest(name = "{0}")
+  @MethodSource("provideConfigFileScopesTestCases")
+  public void testConfigFileScopes(String testName, String profile, List<String> expectedScopes) {
+    Map<String, String> env = new HashMap<>();
+    env.put("HOME", "src/test/resources/testdata");
+
+    DatabricksConfig config = new DatabricksConfig().setProfile(profile);
+    config.resolve(new Environment(env, new ArrayList<>(), System.getProperty("os.name")));
+
+    List<String> scopes = config.getScopes();
+    assertEquals(expectedScopes.size(), scopes.size());
+    for (int i = 0; i < expectedScopes.size(); i++) {
+      assertEquals(expectedScopes.get(i), scopes.get(i));
+    }
+  }
 }

databricks-sdk-java/src/test/resources/testdata/.databrickscfg

Lines changed: 12 additions & 1 deletion
@@ -38,4 +38,15 @@ google_credentials = paw48590aw8e09t8apu
 
 [pat.with.dot]
 host = https://dbc-XXXXXXXX-YYYY.cloud.databricks.com/
-token = PT0+IC9kZXYvdXJhbmRvbSA8PT0KYFZ
+token = PT0+IC9kZXYvdXJhbmRvbSA8PT0KYFZ
+
+[scope-empty]
+host = https://example.cloud.databricks.com
+
+[scope-single]
+host = https://example.cloud.databricks.com
+scopes = clusters:read
+
+[scope-multiple]
+host = https://example.cloud.databricks.com
+scopes = clusters, jobs, pipelines, iam:read, files:read, mlflow, model-serving:read
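For context, a hedged end-to-end sketch of how a profile like [scope-multiple] would be consumed (assumes a ~/.databrickscfg containing a profile like the one above and the no-argument resolve() overload; error handling omitted):

import com.databricks.sdk.core.DatabricksConfig;

public class ScopesFromConfigFileSketch {
  public static void main(String[] args) {
    // Assumes ~/.databrickscfg defines [scope-multiple] with a "scopes = ..." entry,
    // as in the test fixture above.
    DatabricksConfig config = new DatabricksConfig().setProfile("scope-multiple").resolve();

    // After resolve(), scopes are parsed from the config file and sorted alphabetically.
    System.out.println(config.getScopes());
    // e.g. [clusters, files:read, iam:read, jobs, mlflow, model-serving:read, pipelines]
  }
}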
