Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,13 @@

import com.databricks.sdk.support.InternalApi;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.time.Duration;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

@InternalApi
class ConfigAttributeAccessor {
Expand Down Expand Up @@ -63,6 +67,21 @@ public void setValueOnConfig(DatabricksConfig cfg, String value) throws IllegalA
field.set(cfg, seconds > 0 ? Duration.ofSeconds(seconds) : null);
} else if (field.getType() == ProxyConfig.ProxyAuthType.class) {
field.set(cfg, ProxyConfig.ProxyAuthType.valueOf(value));
} else if (List.class.isAssignableFrom(field.getType())) {
// Handle List<String> fields (e.g., scopes)
Comment on lines 67 to +71
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I have a question about this big if-else block: it has no default `else` branch that throws an error when an unexpected type appears in the config. Is the current code simply ignoring those types silently?

// Parse comma and/or whitespace separated values from environment variable or config file
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
// Parse comma and/or whitespace separated values from environment variable or config file
// Parse comma and/or whitespace separated values from environment variable or config file.

if (field.getGenericType() instanceof ParameterizedType) {
ParameterizedType paramType = (ParameterizedType) field.getGenericType();
if (paramType.getActualTypeArguments().length > 0
&& paramType.getActualTypeArguments()[0] == String.class) {
// Split by commas and/or whitespace and filter out empty strings
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why do we have two delimiters? Shouldn't we only have a single delimiter, preferably a comma?

List<String> list =
Arrays.stream(value.trim().split("[,\\s]+"))
.filter(s -> !s.isEmpty())
.collect(Collectors.toList());
field.set(cfg, list);
}
}
}
field.setAccessible(false);
}
Expand Down Expand Up @@ -91,6 +110,9 @@ public String toString() {
}

public String getAsString(Object value) {
if (value instanceof List) {
return String.join(" ", (List<String>) value);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

You check the instanceof with just List, however, you typecast the variable to List , I think it can lead to unexpected behaviors.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Also, why did you choose a space as a delimiter instead of a comma?

}
return value.toString();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ public class DatabricksConfig {
@ConfigAttribute(env = "DATABRICKS_CLIENT_SECRET", auth = "oauth", sensitive = true)
private String clientSecret;

@ConfigAttribute(env = "DATABRICKS_SCOPES", auth = "oauth")
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It actually wasn't possible to pass scopes through environment variable because Lists weren't parsed correctly, so removing this is not a breaking change.
Removing the environment variable attribute for consistency with the other SDKs where we are not supporting setting scopes via environment variables.

@ConfigAttribute(auth = "oauth")
private List<String> scopes;
Comment on lines -39 to 40
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is a backward-incompatible change. What's the rationale for removing this environment variable?


@ConfigAttribute(env = "DATABRICKS_REDIRECT_URL", auth = "oauth")
Expand Down Expand Up @@ -204,6 +204,7 @@ private synchronized DatabricksConfig innerResolve() {
try {
ConfigLoader.resolve(this);
ConfigLoader.validate(this);
sortScopes();
ConfigLoader.fixHostIfNeeded(this);
initHttp();
return this;
Expand All @@ -212,6 +213,13 @@ private synchronized DatabricksConfig innerResolve() {
}
}

/** Sort scopes in-place for better de-duplication in the refresh token cache. */
private void sortScopes() {
Comment on lines +216 to +217
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is a private function, so according to Java convention. We shouldn't have the comments in the javadoc format.

Suggested change
/** Sort scopes in-place for better de-duplication in the refresh token cache. */
private void sortScopes() {
// Sort scopes in-place for better de-duplication in the refresh token cache.
private void sortScopes() {

if (scopes != null && !scopes.isEmpty()) {
java.util.Collections.sort(scopes);
}
}

private void initHttp() {
if (httpClient != null) {
return;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,15 @@
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

public class DatabricksConfigTest {
@Test
Expand Down Expand Up @@ -322,4 +327,39 @@ public void testDisableOauthRefreshTokenEnvironmentVariable() {

assertEquals(true, config.getDisableOauthRefreshToken());
}

// Config File Scope Parsing Tests

private static Stream<Arguments> provideConfigFileScopesTestCases() {
Comment on lines +331 to +333
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
// Config File Scope Parsing Tests
private static Stream<Arguments> provideConfigFileScopesTestCases() {
// Config File Scope Parsing Tests.
private static Stream<Arguments> provideConfigFileScopesTestCases() {

return Stream.of(
Arguments.of("Empty scopes defaults to all-apis", "scope-empty", Arrays.asList("all-apis")),
Arguments.of("Single scope", "scope-single", Arrays.asList("clusters:read")),
Arguments.of(
"Multiple scopes sorted alphabetically",
"scope-multiple",
Arrays.asList(
"clusters",
"files:read",
"iam:read",
"jobs",
"mlflow",
"model-serving:read",
"pipelines")));
}

@ParameterizedTest(name = "{0}")
@MethodSource("provideConfigFileScopesTestCases")
public void testConfigFileScopes(String testName, String profile, List<String> expectedScopes) {
Map<String, String> env = new HashMap<>();
env.put("HOME", "src/test/resources/testdata");

DatabricksConfig config = new DatabricksConfig().setProfile(profile);
config.resolve(new Environment(env, new ArrayList<>(), System.getProperty("os.name")));

List<String> scopes = config.getScopes();
assertEquals(expectedScopes.size(), scopes.size());
for (int i = 0; i < expectedScopes.size(); i++) {
assertEquals(expectedScopes.get(i), scopes.get(i));
}
Comment on lines +360 to +363
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we use a direct comparison, like: assertEquals(expectedScopes, config.getScopes())? Instead of the comparison, you are doing? It seems a bit verbose.

}
}
13 changes: 12 additions & 1 deletion databricks-sdk-java/src/test/resources/testdata/.databrickscfg
Original file line number Diff line number Diff line change
Expand Up @@ -38,4 +38,15 @@ google_credentials = paw48590aw8e09t8apu

[pat.with.dot]
host = https://dbc-XXXXXXXX-YYYY.cloud.databricks.com/
token = PT0+IC9kZXYvdXJhbmRvbSA8PT0KYFZ
token = PT0+IC9kZXYvdXJhbmRvbSA8PT0KYFZ

[scope-empty]
host = https://example.cloud.databricks.com

[scope-single]
host = https://example.cloud.databricks.com
scopes = clusters:read

[scope-multiple]
host = https://example.cloud.databricks.com
scopes = clusters, jobs, pipelines, iam:read, files:read, mlflow, model-serving:read
Loading