diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 5dd8581f1..b30aa8046 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -7,11 +7,14 @@ ### Breaking Changes ### Bug Fixes +* Fixed Databricks CLI `--profile` fallback by detecting the CLI version at init time. The previous error-based detection was broken because `--profile` is a global Cobra flag silently accepted by old CLIs. ### Security Vulnerabilities ### Documentation ### Internal Changes +* Detect Databricks CLI version at init time via `databricks version --output json`, enabling version-gated flag support. Successful detections are cached per CLI path; subprocess failures fall back to the most conservative command and are retried on the next call. +* Pass `--force-refresh` to Databricks CLI `auth token` command (when the installed CLI is >= v0.296.0) so the SDK always receives a freshly minted token instead of a potentially stale one from the CLI's internal cache. ### API Changes diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/CliTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/CliTokenSource.java index 7855b73c7..3e0dfc298 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/CliTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/CliTokenSource.java @@ -30,11 +30,6 @@ public class CliTokenSource implements TokenSource { private String accessTokenField; private String expiryField; private Environment env; - // fallbackCmd is tried when the primary command fails with "unknown flag: --profile", - // indicating the CLI is too old to support --profile. Can be removed once support - // for CLI versions predating --profile is dropped. - // See: https://github.com/databricks/databricks-sdk-go/pull/1497 - private List fallbackCmd; /** * Internal exception that carries the clean stderr message but exposes full output for checks. 
@@ -58,24 +53,11 @@ public CliTokenSource( String accessTokenField, String expiryField, Environment env) { - this(cmd, tokenTypeField, accessTokenField, expiryField, env, null); - } - - public CliTokenSource( - List cmd, - String tokenTypeField, - String accessTokenField, - String expiryField, - Environment env, - List fallbackCmd) { - super(); this.cmd = OSUtils.get(env).getCliExecutableCommand(cmd); this.tokenTypeField = tokenTypeField; this.accessTokenField = accessTokenField; this.expiryField = expiryField; this.env = env; - this.fallbackCmd = - fallbackCmd != null ? OSUtils.get(env).getCliExecutableCommand(fallbackCmd) : null; } /** @@ -158,22 +140,6 @@ public Token getToken() { try { return execCliCommand(this.cmd); } catch (IOException e) { - String textToCheck = - e instanceof CliCommandException - ? ((CliCommandException) e).getFullOutput() - : e.getMessage(); - if (fallbackCmd != null - && textToCheck != null - && textToCheck.contains("unknown flag: --profile")) { - LOG.warn( - "Databricks CLI does not support --profile flag. Falling back to --host. 
" - + "Please upgrade your CLI to the latest version."); - try { - return execCliCommand(this.fallbackCmd); - } catch (IOException fallbackException) { - throw new DatabricksException(fallbackException.getMessage(), fallbackException); - } - } throw new DatabricksException(e.getMessage(), e); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksCliCredentialsProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksCliCredentialsProvider.java index ae401280d..17c30e173 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksCliCredentialsProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksCliCredentialsProvider.java @@ -6,12 +6,19 @@ import com.databricks.sdk.core.oauth.OAuthHeaderFactory; import com.databricks.sdk.core.oauth.Token; import com.databricks.sdk.core.oauth.TokenSource; +import com.databricks.sdk.core.utils.Environment; import com.databricks.sdk.core.utils.OSUtils; import com.databricks.sdk.support.InternalApi; import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; +import org.apache.commons.io.IOUtils; @InternalApi public class DatabricksCliCredentialsProvider implements CredentialsProvider { @@ -22,6 +29,26 @@ public class DatabricksCliCredentialsProvider implements CredentialsProvider { private static final ObjectMapper MAPPER = new ObjectMapper(); + // ---- Version detection ---- + + // --profile support added in CLI v0.207.1: https://github.com/databricks/cli/pull/855 + static final DatabricksCliVersion CLI_VERSION_FOR_PROFILE = new DatabricksCliVersion(0, 207, 1); + + // --force-refresh support added in CLI v0.296.0: 
https://github.com/databricks/cli/pull/4767 + static final DatabricksCliVersion CLI_VERSION_FOR_FORCE_REFRESH = + new DatabricksCliVersion(0, 296, 0); + + // 5-second cap on `databricks version` so a hung CLI (slow first-run scan, antivirus, blocked + // stdin) does not wedge SDK init indefinitely. + private static final long VERSION_PROBE_TIMEOUT_SECONDS = 5; + + // Successful version probes keyed by cliPath. Failures are deliberately not cached, so a + // transient error (timeout, AV scan) does not pin every later token source to the conservative + // fallback for the rest of the process lifetime. + private static final Map VERSION_CACHE = new ConcurrentHashMap<>(); + + // ---- Scope validation ---- + /** Thrown when the cached CLI token's scopes don't match the SDK's configured scopes. */ static class ScopeMismatchException extends DatabricksException { ScopeMismatchException(String message) { @@ -36,59 +63,13 @@ static class ScopeMismatchException extends DatabricksException { private static final Set SCOPES_IGNORED_FOR_COMPARISON = Collections.singleton("offline_access"); + // ---- Public API ---- + @Override public String authType() { return DATABRICKS_CLI; } - /** - * Builds the CLI command arguments using --host (legacy path). - * - * @param cliPath Path to the databricks CLI executable - * @param config Configuration containing host, account ID, workspace ID, etc. 
- * @return List of command arguments - */ - List buildHostArgs(String cliPath, DatabricksConfig config) { - List cmd = - new ArrayList<>(Arrays.asList(cliPath, "auth", "token", "--host", config.getHost())); - if (config.getClientType() == ClientType.ACCOUNT) { - cmd.add("--account-id"); - cmd.add(config.getAccountId()); - } - return cmd; - } - - private CliTokenSource getDatabricksCliTokenSource(DatabricksConfig config) { - String cliPath = config.getDatabricksCliPath(); - if (cliPath == null) { - cliPath = OSUtils.get(config.getEnv()).getDatabricksCliPath(); - } - if (cliPath == null) { - LOG.debug("Databricks CLI could not be found"); - return null; - } - - List cmd; - List fallbackCmd = null; - - if (config.getProfile() != null) { - // When profile is set, use --profile as the primary command. - // The profile contains the full config (host, account_id, etc.). - cmd = - new ArrayList<>( - Arrays.asList(cliPath, "auth", "token", "--profile", config.getProfile())); - // Build a --host fallback for older CLIs that don't support --profile. 
- if (config.getHost() != null) { - fallbackCmd = buildHostArgs(cliPath, config); - } - } else { - cmd = buildHostArgs(cliPath, config); - } - - return new CliTokenSource( - cmd, "token_type", "access_token", "expiry", config.getEnv(), fallbackCmd); - } - @Override public OAuthHeaderFactory configure(DatabricksConfig config) { String host = config.getHost(); @@ -151,6 +132,214 @@ public Token getToken() { } } + // ---- Token source construction ---- + + private CliTokenSource getDatabricksCliTokenSource(DatabricksConfig config) { + String cliPath = config.getDatabricksCliPath(); + if (cliPath == null) { + cliPath = OSUtils.get(config.getEnv()).getDatabricksCliPath(); + } + if (cliPath == null) { + LOG.debug("Databricks CLI could not be found"); + return null; + } + + List cmd = resolveCliCommand(cliPath, config); + return new CliTokenSource(cmd, "token_type", "access_token", "expiry", config.getEnv()); + } + + /** + * Detects the installed CLI version and builds the {@code auth token} command. Falls back to the + * most conservative command when version detection fails. + */ + List resolveCliCommand(String cliPath, DatabricksConfig config) { + DatabricksCliVersion version = getCliVersion(cliPath, config.getEnv()); + if (version.isDefaultDevBuild()) { + // A default-marker dev build has no injected version, so every feature gate fails. + // Surface an informational hint so users know why their feature flags aren't taking effect. + LOG.info( + "Databricks CLI {} is a development build; feature detection will use conservative " + + "fallbacks. Rebuild the CLI with an explicit version to enable capability-based " + + "flag selection.", + version); + } + return buildCliCommand(cliPath, config, version); + } + + /** + * Builds the full {@code auth token} command, including capability-gated flags. + * + *

Delegates the profile/host decision to {@link #buildCoreCliCommand} and appends {@code + * --force-refresh} when the installed CLI supports it. + */ + List buildCliCommand( + String cliPath, DatabricksConfig config, DatabricksCliVersion version) { + List cmd = buildCoreCliCommand(cliPath, config, version); + if (version.atLeast(CLI_VERSION_FOR_FORCE_REFRESH)) { + cmd.add("--force-refresh"); + } else if (version.equals(DatabricksCliVersion.UNKNOWN) || version.isDefaultDevBuild()) { + // Detection failed or no version metadata — we can't prove the CLI lacks --force-refresh, + // just failed to confirm it. The version probe already logged the underlying cause. + LOG.warn( + "Could not confirm --force-refresh support for Databricks CLI {} (requires >= {}). " + + "The CLI's token cache may provide stale tokens.", + version, + CLI_VERSION_FOR_FORCE_REFRESH); + } else { + LOG.warn( + "Databricks CLI {} does not support --force-refresh (requires >= {}). " + + "The CLI's token cache may provide stale tokens.", + version, + CLI_VERSION_FOR_FORCE_REFRESH); + } + return cmd; + } + + /** + * Builds the base {@code auth token} command without capability-gated flags. Falls back to {@code + * --host} when {@code --profile} is either not configured or not supported by the installed CLI. + */ + List buildCoreCliCommand( + String cliPath, DatabricksConfig config, DatabricksCliVersion version) { + if (config.getProfile() == null) { + return buildHostArgs(cliPath, config); + } + + // Flag --profile is a global CLI flag and is recognized for all commands even the ones that + // do not support it. Only use --profile in CLI versions known to support it in `auth token`. + if (!version.atLeast(CLI_VERSION_FOR_PROFILE)) { + if (version.equals(DatabricksCliVersion.UNKNOWN) || version.isDefaultDevBuild()) { + // We didn't actually prove the CLI lacks --profile; we just failed to confirm it. + LOG.warn( + "Could not confirm --profile support for Databricks CLI {} (requires >= {}). 
" + + "Falling back to --host.", + version, + CLI_VERSION_FOR_PROFILE); + } else { + LOG.warn( + "Databricks CLI {} does not support --profile (requires >= {}). Falling back to --host.", + version, + CLI_VERSION_FOR_PROFILE); + } + return buildHostArgs(cliPath, config); + } + + return new ArrayList<>( + Arrays.asList(cliPath, "auth", "token", "--profile", config.getProfile())); + } + + /** + * Builds the CLI command arguments using --host (legacy path). + * + * @param cliPath Path to the databricks CLI executable + * @param config Configuration containing host, account ID, workspace ID, etc. + * @return List of command arguments + */ + List buildHostArgs(String cliPath, DatabricksConfig config) { + List cmd = + new ArrayList<>(Arrays.asList(cliPath, "auth", "token", "--host", config.getHost())); + if (config.getClientType() == ClientType.ACCOUNT) { + cmd.add("--account-id"); + cmd.add(config.getAccountId()); + } + return cmd; + } + + // ---- Version detection ---- + + /** + * Returns the CLI version, catching subprocess failures so the caller can proceed with the + * conservative fallback. Successful results are cached per {@code cliPath} for the process + * lifetime; failures are not cached and will be retried on the next call. + */ + DatabricksCliVersion getCliVersion(String cliPath, Environment env) { + DatabricksCliVersion cached = VERSION_CACHE.get(cliPath); + if (cached != null) { + return cached; + } + + try { + DatabricksCliVersion version = probeCliVersion(cliPath, env); + VERSION_CACHE.put(cliPath, version); + return version; + } catch (Exception e) { + LOG.warn( + "Failed to detect Databricks CLI version: {}. Falling back to conservative flag set.", + e.getMessage()); + return DatabricksCliVersion.UNKNOWN; + } + } + + /** + * Runs {@code databricks version --output json} and returns the parsed {@link + * DatabricksCliVersion}. 
+ */ + DatabricksCliVersion probeCliVersion(String cliPath, Environment env) throws IOException { + List versionArgs = Arrays.asList(cliPath, "version", "--output", "json"); + List cmd = OSUtils.get(env).getCliExecutableCommand(versionArgs); + + ProcessBuilder pb = new ProcessBuilder(cmd); + pb.environment().putAll(env.getEnv()); + Process process = pb.start(); + + try { + if (!process.waitFor(VERSION_PROBE_TIMEOUT_SECONDS, TimeUnit.SECONDS)) { + process.destroyForcibly(); + throw new IOException( + "timed out after " + + VERSION_PROBE_TIMEOUT_SECONDS + + "s waiting for `databricks version`"); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new IOException("interrupted waiting for `databricks version`", e); + } + + if (process.exitValue() != 0) { + String stderr = readStream(process.getErrorStream()); + throw new IOException( + "`databricks version` exited with code " + process.exitValue() + ": " + stderr); + } + + return parseCliVersion(readStream(process.getInputStream())); + } + + /** + * Parses the JSON output of {@code databricks version --output json}. + * + *

Takes Major/Minor/Patch from the JSON's pre-parsed numeric fields. The Prerelease field and + * the Version string are intentionally ignored: for our feature-gate purposes the base triple is + * sufficient, and the (0, 0, 0) case already identifies the default dev build (a CLI built + * without version metadata leaves these fields at their zero defaults). + * + *

Returns {@link DatabricksCliVersion#UNKNOWN} on failure so that an unparseable version + * disables every feature gate. + */ + static DatabricksCliVersion parseCliVersion(String output) { + try { + JsonNode node = MAPPER.readTree(output); + JsonNode major = node.get("Major"); + JsonNode minor = node.get("Minor"); + JsonNode patch = node.get("Patch"); + if (major == null || minor == null || patch == null) { + LOG.debug( + "Failed to parse Databricks CLI version: missing Major/Minor/Patch in {}", output); + return DatabricksCliVersion.UNKNOWN; + } + return new DatabricksCliVersion(major.asInt(), minor.asInt(), patch.asInt()); + } catch (JsonProcessingException e) { + LOG.debug( + "Failed to parse Databricks CLI version from output: {} ({})", output, e.getMessage()); + return DatabricksCliVersion.UNKNOWN; + } + } + + private static String readStream(InputStream stream) throws IOException { + return new String(IOUtils.toByteArray(stream), StandardCharsets.UTF_8); + } + + // ---- Scope validation ---- + /** * Validate that the token's scopes match the requested scopes from the config. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksCliVersion.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksCliVersion.java new file mode 100644 index 000000000..19065a3fc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksCliVersion.java @@ -0,0 +1,93 @@ +package com.databricks.sdk.core; + +import com.databricks.sdk.support.InternalApi; +import java.util.Objects; + +/** + * Semver version triple of the Databricks CLI used for capability gating. + * + *

+ * <p>Three sentinel states in the (major, minor, patch) tuple:
+ *
+ * <ul>
+ *   <li>{@code (-1, -1, -1)} — the {@link #UNKNOWN} sentinel, meaning version detection failed. It
+ *       compares less than every real release so every feature gate fails.
+ *   <li>{@code (0, 0, 0)} — the CLI's default dev build, emitted when the binary was built without
+ *       version metadata. See {@link #isDefaultDevBuild()}.
+ *   <li>anything else — a real CLI version.
+ * </ul>
+ *
+ * <p>
Prerelease tags are deliberately ignored: feature gates are release-based, so a prerelease of + * a version with a flag is assumed to have the flag too. + */ +@InternalApi +public final class DatabricksCliVersion implements Comparable { + public static final DatabricksCliVersion UNKNOWN = new DatabricksCliVersion(-1, -1, -1); + + private final int major; + private final int minor; + private final int patch; + + public DatabricksCliVersion(int major, int minor, int patch) { + this.major = major; + this.minor = minor; + this.patch = patch; + } + + public int getMajor() { + return major; + } + + public int getMinor() { + return minor; + } + + public int getPatch() { + return patch; + } + + /** Returns true when {@code this} is greater than or equal to {@code other}. */ + public boolean atLeast(DatabricksCliVersion other) { + return compareTo(other) >= 0; + } + + /** + * Returns true when the version is the CLI's default dev build {@code (0, 0, 0)}. A CLI built + * without version metadata leaves these fields at their zero defaults. + */ + public boolean isDefaultDevBuild() { + return major == 0 && minor == 0 && patch == 0; + } + + @Override + public int compareTo(DatabricksCliVersion o) { + int c = Integer.compare(major, o.major); + if (c != 0) return c; + c = Integer.compare(minor, o.minor); + if (c != 0) return c; + return Integer.compare(patch, o.patch); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof DatabricksCliVersion)) return false; + DatabricksCliVersion that = (DatabricksCliVersion) o; + return major == that.major && minor == that.minor && patch == that.patch; + } + + @Override + public int hashCode() { + return Objects.hash(major, minor, patch); + } + + @Override + public String toString() { + if (equals(UNKNOWN)) { + return "unknown"; + } + if (isDefaultDevBuild()) { + return "v0.0.0-dev"; + } + return "v" + major + "." + minor + "." 
+ patch; + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/CliTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/CliTokenSourceTest.java index 8476c6de5..28d3deaf6 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/CliTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/CliTokenSourceTest.java @@ -28,7 +28,6 @@ import java.util.List; import java.util.Map; import java.util.TimeZone; -import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -217,128 +216,24 @@ public void testParseExpiry(String input, Instant expectedInstant, String descri } } - // ---- Fallback tests for --profile flag handling ---- + // ---- Error propagation ---- - private CliTokenSource makeTokenSource( - Environment env, List primaryCmd, List fallbackCmd) { + private CliTokenSource makeTokenSource(Environment env, List cmd) { OSUtilities osUtils = mock(OSUtilities.class); when(osUtils.getCliExecutableCommand(any())).thenAnswer(inv -> inv.getArgument(0)); try (MockedStatic mockedOSUtils = mockStatic(OSUtils.class)) { mockedOSUtils.when(() -> OSUtils.get(any())).thenReturn(osUtils); - return new CliTokenSource( - primaryCmd, "token_type", "access_token", "expiry", env, fallbackCmd); - } - } - - private String validTokenJson(String accessToken) { - String expiry = - ZonedDateTime.now() - .plusHours(1) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXXX")); - return String.format( - "{\"token_type\":\"Bearer\",\"access_token\":\"%s\",\"expiry\":\"%s\"}", - accessToken, expiry); - } - - @Test - public void testFallbackOnUnknownProfileFlagInStderr() { - Environment env = mock(Environment.class); - when(env.getEnv()).thenReturn(new HashMap<>()); - - List primaryCmd = - Arrays.asList("databricks", "auth", "token", "--profile", "my-profile"); - List fallbackCmdList = - 
Arrays.asList("databricks", "auth", "token", "--host", "https://workspace.databricks.com"); - - CliTokenSource tokenSource = makeTokenSource(env, primaryCmd, fallbackCmdList); - - AtomicInteger callCount = new AtomicInteger(0); - try (MockedConstruction mocked = - mockConstruction( - ProcessBuilder.class, - (pb, context) -> { - if (callCount.getAndIncrement() == 0) { - Process failProcess = mock(Process.class); - when(failProcess.getInputStream()) - .thenReturn(new ByteArrayInputStream(new byte[0])); - when(failProcess.getErrorStream()) - .thenReturn( - new ByteArrayInputStream("Error: unknown flag: --profile".getBytes())); - when(failProcess.waitFor()).thenReturn(1); - when(pb.start()).thenReturn(failProcess); - } else { - Process successProcess = mock(Process.class); - when(successProcess.getInputStream()) - .thenReturn( - new ByteArrayInputStream(validTokenJson("fallback-token").getBytes())); - when(successProcess.getErrorStream()) - .thenReturn(new ByteArrayInputStream(new byte[0])); - when(successProcess.waitFor()).thenReturn(0); - when(pb.start()).thenReturn(successProcess); - } - })) { - Token token = tokenSource.getToken(); - assertEquals("fallback-token", token.getAccessToken()); - assertEquals(2, mocked.constructed().size()); + return new CliTokenSource(cmd, "token_type", "access_token", "expiry", env); } } @Test - public void testFallbackTriggeredWhenUnknownFlagInStdout() { - // Fallback triggers even when "unknown flag" appears in stdout rather than stderr. 
+ public void testCliErrorPropagates() { Environment env = mock(Environment.class); when(env.getEnv()).thenReturn(new HashMap<>()); - List primaryCmd = - Arrays.asList("databricks", "auth", "token", "--profile", "my-profile"); - List fallbackCmdList = - Arrays.asList("databricks", "auth", "token", "--host", "https://workspace.databricks.com"); - - CliTokenSource tokenSource = makeTokenSource(env, primaryCmd, fallbackCmdList); - - AtomicInteger callCount = new AtomicInteger(0); - try (MockedConstruction mocked = - mockConstruction( - ProcessBuilder.class, - (pb, context) -> { - if (callCount.getAndIncrement() == 0) { - Process failProcess = mock(Process.class); - when(failProcess.getInputStream()) - .thenReturn( - new ByteArrayInputStream("Error: unknown flag: --profile".getBytes())); - when(failProcess.getErrorStream()) - .thenReturn(new ByteArrayInputStream(new byte[0])); - when(failProcess.waitFor()).thenReturn(1); - when(pb.start()).thenReturn(failProcess); - } else { - Process successProcess = mock(Process.class); - when(successProcess.getInputStream()) - .thenReturn( - new ByteArrayInputStream(validTokenJson("fallback-token").getBytes())); - when(successProcess.getErrorStream()) - .thenReturn(new ByteArrayInputStream(new byte[0])); - when(successProcess.waitFor()).thenReturn(0); - when(pb.start()).thenReturn(successProcess); - } - })) { - Token token = tokenSource.getToken(); - assertEquals("fallback-token", token.getAccessToken()); - assertEquals(2, mocked.constructed().size()); - } - } - - @Test - public void testNoFallbackOnRealAuthError() { - // When the primary fails with a real error (not unknown flag), no fallback is attempted. 
- Environment env = mock(Environment.class); - when(env.getEnv()).thenReturn(new HashMap<>()); - - List primaryCmd = - Arrays.asList("databricks", "auth", "token", "--profile", "my-profile"); - List fallbackCmdList = - Arrays.asList("databricks", "auth", "token", "--host", "https://workspace.databricks.com"); - - CliTokenSource tokenSource = makeTokenSource(env, primaryCmd, fallbackCmdList); + CliTokenSource tokenSource = + makeTokenSource(env, Arrays.asList("databricks", "auth", "token", "--host", "https://x")); try (MockedConstruction mocked = mockConstruction( @@ -358,33 +253,4 @@ public void testNoFallbackOnRealAuthError() { assertEquals(1, mocked.constructed().size()); } } - - @Test - public void testNoFallbackWhenFallbackCmdNotSet() { - // When fallbackCmd is null and the primary fails with unknown flag, original error propagates. - Environment env = mock(Environment.class); - when(env.getEnv()).thenReturn(new HashMap<>()); - - List primaryCmd = - Arrays.asList("databricks", "auth", "token", "--profile", "my-profile"); - - CliTokenSource tokenSource = makeTokenSource(env, primaryCmd, null); - - try (MockedConstruction mocked = - mockConstruction( - ProcessBuilder.class, - (pb, context) -> { - Process failProcess = mock(Process.class); - when(failProcess.getInputStream()).thenReturn(new ByteArrayInputStream(new byte[0])); - when(failProcess.getErrorStream()) - .thenReturn( - new ByteArrayInputStream("Error: unknown flag: --profile".getBytes())); - when(failProcess.waitFor()).thenReturn(1); - when(pb.start()).thenReturn(failProcess); - })) { - DatabricksException ex = assertThrows(DatabricksException.class, tokenSource::getToken); - assertTrue(ex.getMessage().contains("unknown flag: --profile")); - assertEquals(1, mocked.constructed().size()); - } - } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksCliCredentialsProviderTest.java 
b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksCliCredentialsProviderTest.java index 0f1ca5059..dc18d0e11 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksCliCredentialsProviderTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksCliCredentialsProviderTest.java @@ -4,7 +4,11 @@ import java.util.Arrays; import java.util.List; +import java.util.stream.Stream; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; class DatabricksCliCredentialsProviderTest { @@ -12,11 +16,14 @@ class DatabricksCliCredentialsProviderTest { private static final String HOST = "https://my-workspace.cloud.databricks.com"; private static final String ACCOUNT_HOST = "https://accounts.cloud.databricks.com"; private static final String ACCOUNT_ID = "test-account-123"; + private static final String PROFILE = "my-profile"; private final DatabricksCliCredentialsProvider provider = new DatabricksCliCredentialsProvider(); + // ---- buildHostArgs tests ---- + @Test - void testBuildHostArgs_WorkspaceHost() { + void testBuildHostCommand_WorkspaceHost() { DatabricksConfig config = new DatabricksConfig().setHost(HOST); List cmd = provider.buildHostArgs(CLI_PATH, config); @@ -25,7 +32,7 @@ void testBuildHostArgs_WorkspaceHost() { } @Test - void testBuildHostArgs_AccountHost() { + void testBuildHostCommand_AccountHost() { DatabricksConfig config = new DatabricksConfig().setHost(ACCOUNT_HOST).setAccountId(ACCOUNT_ID); List cmd = provider.buildHostArgs(CLI_PATH, config); @@ -37,7 +44,7 @@ void testBuildHostArgs_AccountHost() { } @Test - void testBuildHostArgs_NonAccountsHostWithAccountId() { + void testBuildHostCommand_NonAccountsHostWithAccountId() { // Non-accounts hosts should not pass --account-id even if accountId is set DatabricksConfig config = new 
DatabricksConfig().setHost(HOST).setAccountId(ACCOUNT_ID); @@ -45,4 +52,117 @@ void testBuildHostArgs_NonAccountsHostWithAccountId() { assertEquals(Arrays.asList(CLI_PATH, "auth", "token", "--host", HOST), cmd); } + + // ---- buildCliCommand tests ---- + + private static Stream buildCliCommandCases() { + return Stream.of( + Arguments.of( + "host only — old CLI, no force-refresh", + new DatabricksConfig().setHost(HOST), + new DatabricksCliVersion(0, 200, 0), + Arrays.asList(CLI_PATH, "auth", "token", "--host", HOST)), + Arguments.of( + "host only — new CLI, with force-refresh", + new DatabricksConfig().setHost(HOST), + new DatabricksCliVersion(0, 296, 0), + Arrays.asList(CLI_PATH, "auth", "token", "--host", HOST, "--force-refresh")), + Arguments.of( + "account host — old CLI, no force-refresh", + new DatabricksConfig().setHost(ACCOUNT_HOST).setAccountId(ACCOUNT_ID), + new DatabricksCliVersion(0, 200, 0), + Arrays.asList( + CLI_PATH, "auth", "token", "--host", ACCOUNT_HOST, "--account-id", ACCOUNT_ID)), + Arguments.of( + "account host — new CLI, with force-refresh", + new DatabricksConfig().setHost(ACCOUNT_HOST).setAccountId(ACCOUNT_ID), + new DatabricksCliVersion(0, 296, 0), + Arrays.asList( + CLI_PATH, + "auth", + "token", + "--host", + ACCOUNT_HOST, + "--account-id", + ACCOUNT_ID, + "--force-refresh")), + Arguments.of( + "profile with profile-supporting CLI — uses --profile, no force-refresh", + new DatabricksConfig().setProfile(PROFILE).setHost(HOST), + new DatabricksCliVersion(0, 207, 1), + Arrays.asList(CLI_PATH, "auth", "token", "--profile", PROFILE)), + Arguments.of( + "profile with newest CLI — uses --profile and --force-refresh", + new DatabricksConfig().setProfile(PROFILE).setHost(HOST), + new DatabricksCliVersion(0, 296, 0), + Arrays.asList(CLI_PATH, "auth", "token", "--profile", PROFILE, "--force-refresh")), + Arguments.of( + "profile with old CLI — falls back to --host, no force-refresh", + new DatabricksConfig().setProfile(PROFILE).setHost(HOST), + new 
DatabricksCliVersion(0, 207, 0), + Arrays.asList(CLI_PATH, "auth", "token", "--host", HOST)), + Arguments.of( + "unknown version — falls back to --host, no force-refresh", + new DatabricksConfig().setProfile(PROFILE).setHost(HOST), + DatabricksCliVersion.UNKNOWN, + Arrays.asList(CLI_PATH, "auth", "token", "--host", HOST)), + Arguments.of( + "dev build — falls back to --host, no force-refresh", + new DatabricksConfig().setProfile(PROFILE).setHost(HOST), + new DatabricksCliVersion(0, 0, 0), + Arrays.asList(CLI_PATH, "auth", "token", "--host", HOST))); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("buildCliCommandCases") + void testBuildCliCommand( + String name, DatabricksConfig config, DatabricksCliVersion version, List expected) { + assertEquals(expected, provider.buildCliCommand(CLI_PATH, config, version)); + } + + // ---- parseCliVersion tests ---- + + @Test + void testParseCliVersion_StandardOutput() { + String json = + "{\"Version\":\"v0.295.0\",\"Major\":0,\"Minor\":295,\"Patch\":0,\"Prerelease\":\"\",\"BuildMetadata\":\"\"}"; + assertEquals( + new DatabricksCliVersion(0, 295, 0), + DatabricksCliCredentialsProvider.parseCliVersion(json)); + } + + @Test + void testParseCliVersion_ProfileVersion() { + String json = "{\"Version\":\"v0.207.1\",\"Major\":0,\"Minor\":207,\"Patch\":1}"; + assertEquals( + new DatabricksCliVersion(0, 207, 1), + DatabricksCliCredentialsProvider.parseCliVersion(json)); + } + + @Test + void testParseCliVersion_DevBuild() { + String json = + "{\"Version\":\"v0.0.0-dev+abc123\",\"Major\":0,\"Minor\":0,\"Patch\":0,\"Prerelease\":\"dev\"}"; + assertEquals( + new DatabricksCliVersion(0, 0, 0), DatabricksCliCredentialsProvider.parseCliVersion(json)); + } + + @Test + void testParseCliVersion_MissingFields() { + String json = "{\"Version\":\"v0.295.0\"}"; + assertEquals( + DatabricksCliVersion.UNKNOWN, DatabricksCliCredentialsProvider.parseCliVersion(json)); + } + + @Test + void testParseCliVersion_MalformedJson() { + assertEquals( + 
DatabricksCliVersion.UNKNOWN, DatabricksCliCredentialsProvider.parseCliVersion("not json")); + } + + @Test + void testParseCliVersion_EmptyString() { + assertEquals( + DatabricksCliVersion.UNKNOWN, DatabricksCliCredentialsProvider.parseCliVersion("")); + } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksCliVersionTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksCliVersionTest.java new file mode 100644 index 000000000..e0d7ef9f2 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksCliVersionTest.java @@ -0,0 +1,66 @@ +package com.databricks.sdk.core; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; + +class DatabricksCliVersionTest { + + @Test + void testAtLeast_equal() { + assertTrue(new DatabricksCliVersion(0, 207, 1).atLeast(new DatabricksCliVersion(0, 207, 1))); + } + + @Test + void testAtLeast_higherPatch() { + assertTrue(new DatabricksCliVersion(0, 207, 2).atLeast(new DatabricksCliVersion(0, 207, 1))); + assertFalse(new DatabricksCliVersion(0, 207, 0).atLeast(new DatabricksCliVersion(0, 207, 1))); + } + + @Test + void testAtLeast_higherMinor() { + assertTrue(new DatabricksCliVersion(0, 296, 0).atLeast(new DatabricksCliVersion(0, 207, 1))); + assertFalse(new DatabricksCliVersion(0, 100, 99).atLeast(new DatabricksCliVersion(0, 207, 1))); + } + + @Test + void testAtLeast_higherMajor() { + assertTrue(new DatabricksCliVersion(1, 0, 0).atLeast(new DatabricksCliVersion(0, 999, 999))); + assertFalse(new DatabricksCliVersion(0, 999, 999).atLeast(new DatabricksCliVersion(1, 0, 0))); + } + + @Test + void testAtLeast_unknownIsLessThanEverything() { + assertFalse(DatabricksCliVersion.UNKNOWN.atLeast(new DatabricksCliVersion(0, 0, 0))); + 
assertFalse(DatabricksCliVersion.UNKNOWN.atLeast(new DatabricksCliVersion(0, 207, 1))); + } + + @Test + void testIsDefaultDevBuild() { + assertTrue(new DatabricksCliVersion(0, 0, 0).isDefaultDevBuild()); + assertFalse(new DatabricksCliVersion(0, 0, 1).isDefaultDevBuild()); + assertFalse(DatabricksCliVersion.UNKNOWN.isDefaultDevBuild()); + } + + @Test + void testToString() { + assertEquals("v0.207.1", new DatabricksCliVersion(0, 207, 1).toString()); + assertEquals("v1.0.0", new DatabricksCliVersion(1, 0, 0).toString()); + assertEquals("v0.0.0-dev", new DatabricksCliVersion(0, 0, 0).toString()); + assertEquals("unknown", DatabricksCliVersion.UNKNOWN.toString()); + } + + @Test + void testEqualsAndHashCode() { + assertEquals(new DatabricksCliVersion(0, 207, 1), new DatabricksCliVersion(0, 207, 1)); + assertEquals( + new DatabricksCliVersion(0, 207, 1).hashCode(), + new DatabricksCliVersion(0, 207, 1).hashCode()); + assertNotEquals(new DatabricksCliVersion(0, 207, 1), new DatabricksCliVersion(0, 207, 2)); + assertNotEquals(new DatabricksCliVersion(0, 207, 1), null); + assertNotEquals(new DatabricksCliVersion(0, 207, 1), "v0.207.1"); + } +}