Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions acceptance/cmd/auth/login/preserve-fields/out.databrickscfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
[DEFAULT]
host = [DATABRICKS_URL]
cluster_id = existing-cluster-123
warehouse_id = warehouse-456
azure_environment = USGOVERNMENT
custom_key = my-custom-value
auth_type = databricks-cli
5 changes: 5 additions & 0 deletions acceptance/cmd/auth/login/preserve-fields/out.test.toml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

21 changes: 21 additions & 0 deletions acceptance/cmd/auth/login/preserve-fields/output.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@

=== Initial profile
[DEFAULT]
host = [DATABRICKS_URL]
cluster_id = existing-cluster-123
warehouse_id = warehouse-456
azure_environment = USGOVERNMENT
custom_key = my-custom-value

=== Run auth login (no --configure-cluster or --configure-serverless)
>>> [CLI] auth login --host [DATABRICKS_URL] --profile DEFAULT
Profile DEFAULT was successfully saved

=== Profile after auth login — all non-auth fields should be preserved
[DEFAULT]
host = [DATABRICKS_URL]
cluster_id = existing-cluster-123
warehouse_id = warehouse-456
azure_environment = USGOVERNMENT
custom_key = my-custom-value
auth_type = databricks-cli
28 changes: 28 additions & 0 deletions acceptance/cmd/auth/login/preserve-fields/script
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Acceptance test: running `auth login` WITHOUT --configure-cluster or
# --configure-serverless must preserve all non-auth fields already present
# in the profile (cluster_id, warehouse_id, azure_environment, and even
# keys the SDK does not recognize).

# Point HOME at a scratch directory so the test writes its own .databrickscfg.
sethome "./home"

# Create an initial profile with cluster_id, warehouse_id, azure_environment,
# and a custom key that is not a recognized SDK config attribute.
cat > "./home/.databrickscfg" <<EOF
[DEFAULT]
host = $DATABRICKS_HOST
cluster_id = existing-cluster-123
warehouse_id = warehouse-456
azure_environment = USGOVERNMENT
custom_key = my-custom-value
EOF

title "Initial profile\n"
cat "./home/.databrickscfg"

# Use a fake browser that performs a GET on the authorization URL
# and follows the redirect back to localhost.
export BROWSER="browser.py"

title "Run auth login (no --configure-cluster or --configure-serverless)"
trace $CLI auth login --host $DATABRICKS_HOST --profile DEFAULT

# Expected: every field from the initial profile survives, plus
# auth_type = databricks-cli added by the login flow.
title "Profile after auth login — all non-auth fields should be preserved\n"
cat "./home/.databrickscfg"

# Track the .databrickscfg file that was created to surface changes.
mv "./home/.databrickscfg" "./out.databrickscfg"
3 changes: 3 additions & 0 deletions acceptance/cmd/auth/login/preserve-fields/test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
Ignore = [
"home"
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
[DEFAULT]
host = https://host
account_id = acc-123
workspace_id = ws-456
token = [DATABRICKS_TOKEN]
5 changes: 5 additions & 0 deletions acceptance/cmd/configure/clears-oauth-on-pat/out.test.toml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

19 changes: 19 additions & 0 deletions acceptance/cmd/configure/clears-oauth-on-pat/output.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@

=== Initial profile (OAuth with unified host)
[DEFAULT]
host = https://host
auth_type = databricks-cli
scopes = all-apis
experimental_is_unified_host = true
account_id = acc-123
workspace_id = ws-456

=== Run configure with PAT token
>>> [CLI] configure --token --host https://host

=== Profile after PAT configure
[DEFAULT]
host = https://host
account_id = acc-123
workspace_id = ws-456
token = [DATABRICKS_TOKEN]
24 changes: 24 additions & 0 deletions acceptance/cmd/configure/clears-oauth-on-pat/script
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Acceptance test: `configure --token` (PAT) over a profile that previously
# used OAuth must clear the OAuth metadata (auth_type, scopes,
# experimental_is_unified_host) while keeping non-auth fields
# (account_id, workspace_id) intact.

# Point HOME at a scratch directory so the test writes its own .databrickscfg.
sethome "./home"

# Pre-populate a profile with OAuth metadata and unified host fields
# (as if `auth login` was previously run against a unified host).
cat > "./home/.databrickscfg" <<EOF
[DEFAULT]
host = https://host
auth_type = databricks-cli
scopes = all-apis
experimental_is_unified_host = true
account_id = acc-123
workspace_id = ws-456
EOF

title "Initial profile (OAuth with unified host)\n"
cat "./home/.databrickscfg"

# The PAT is fed on stdin; `configure --token` reads it non-interactively.
title "Run configure with PAT token"
echo "new-token" | trace $CLI configure --token --host https://host

# Expected: host/account_id/workspace_id/token remain, OAuth fields are gone.
title "Profile after PAT configure\n"
cat "./home/.databrickscfg"

# Track the .databrickscfg file that was created to surface changes.
mv "./home/.databrickscfg" "./out.databrickscfg"
3 changes: 3 additions & 0 deletions acceptance/cmd/configure/clears-oauth-on-pat/test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
Ignore = [
"home"
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
[DEFAULT]
host = https://host
cluster_id = env-cluster-789
token = [DATABRICKS_TOKEN]

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@

=== Initial profile with serverless_compute_id
[DEFAULT]
host = https://host
serverless_compute_id = auto

=== Run configure with cluster from env var
>>> [CLI] configure --token --host https://host

=== Profile after configure (serverless should be cleared)
[DEFAULT]
host = https://host
cluster_id = env-cluster-789
token = [DATABRICKS_TOKEN]
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Acceptance test: setting a cluster via the DATABRICKS_CLUSTER_ID env var
# during `configure --token` must clear serverless_compute_id from the
# profile (cluster and serverless are mutually exclusive).

# Point HOME at a scratch directory so the test writes its own .databrickscfg.
sethome "./home"

# Pre-populate a profile with serverless_compute_id.
cat > "./home/.databrickscfg" <<EOF
[DEFAULT]
host = https://host
serverless_compute_id = auto
EOF

title "Initial profile with serverless_compute_id\n"
cat "./home/.databrickscfg"

# Set cluster ID via env var (not via --configure-cluster flag).
export DATABRICKS_CLUSTER_ID=env-cluster-789

# The PAT is fed on stdin; `configure --token` reads it non-interactively.
title "Run configure with cluster from env var"
echo "new-token" | trace $CLI configure --token --host https://host

# Expected: cluster_id is written, serverless_compute_id is removed.
title "Profile after configure (serverless should be cleared)\n"
cat "./home/.databrickscfg"

# Track the .databrickscfg file that was created to surface changes.
mv "./home/.databrickscfg" "./out.databrickscfg"
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
Ignore = [
"home"
]
47 changes: 34 additions & 13 deletions cmd/auth/login.go
Original file line number Diff line number Diff line change
Expand Up @@ -155,10 +155,18 @@ depends on the existing profiles you have set in your configuration file
authArguments.Profile = profileName

var scopesList []string
if scopes != "" {
switch {
case scopes != "":
// Explicit --scopes flag takes precedence.
for _, s := range strings.Split(scopes, ",") {
scopesList = append(scopesList, strings.TrimSpace(s))
}
case existingProfile != nil && existingProfile.Scopes != "":
// Preserve scopes from the existing profile so re-login
// uses the same scopes the user previously configured.
for _, s := range strings.Split(existingProfile.Scopes, ",") {
scopesList = append(scopesList, strings.TrimSpace(s))
}
}

oauthArgument, err := authArguments.ToOAuthArgument()
Expand Down Expand Up @@ -190,6 +198,18 @@ depends on the existing profiles you have set in your configuration file
// 2. Saving the profile.

var clusterID, serverlessComputeID string

// Keys to explicitly remove from the profile. OAuth login always
// clears incompatible credential fields (PAT, basic auth, M2M).
clearKeys := oauthLoginClearKeys()

// Boolean false is zero-valued and skipped by SaveToProfile's IsZero
// check. Explicitly clear experimental_is_unified_host when false so
// it doesn't remain sticky from a previous login.
if !authArguments.IsUnifiedHost {
clearKeys = append(clearKeys, "experimental_is_unified_host")
}

switch {
case configureCluster:
// Create a workspace client to list clusters for interactive selection.
Expand All @@ -210,20 +230,14 @@ depends on the existing profiles you have set in your configuration file
if err != nil {
return err
}
// Cluster and serverless are mutually exclusive.
clearKeys = append(clearKeys, "serverless_compute_id")
case configureServerless:
serverlessComputeID = "auto"
// Cluster and serverless are mutually exclusive.
clearKeys = append(clearKeys, "cluster_id")
default:
// Respect the existing profile if it exists, even if it has
// both cluster and serverless configured. Tools relying on
// these fields from the profile will need to handle this case.
//
// TODO: consider whether we should use this as an opportunity
// to clean up the profile under the assumption that serverless
// is the preferred option.
if existingProfile != nil {
clusterID = existingProfile.ClusterID
serverlessComputeID = existingProfile.ServerlessComputeID
}
// Neither flag: preserve both from existing profile via merge semantics.
}

if profileName != "" {
Expand All @@ -238,7 +252,7 @@ depends on the existing profiles you have set in your configuration file
ConfigFile: os.Getenv("DATABRICKS_CONFIG_FILE"),
ServerlessComputeID: serverlessComputeID,
Scopes: scopesList,
})
}, clearKeys...)
if err != nil {
return err
}
Expand Down Expand Up @@ -401,6 +415,13 @@ func openURLSuppressingStderr(url string) error {
return browserpkg.OpenURL(url)
}

// oauthLoginClearKeys returns profile keys that should be explicitly removed
// when performing an OAuth login. Derives auth credential fields dynamically
// from the SDK's ConfigAttributes to stay in sync as new auth methods are added.
func oauthLoginClearKeys() []string {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This list can evolve if we support more auth in the future. That was probably the rationale behind nuking everything before and having an allow list. Not a blocker just wanted to flag this.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Good point, I will refactor to derive the list dynamically from config.ConfigAttributes using HasAuthAttribute(), so new auth fields added to the SDK are picked up automatically. Both oauthLoginClearKeys and the configure clear-keys now use this

return databrickscfg.AuthCredentialKeys()
}

// getBrowserFunc returns a function that opens the given URL in the browser.
// It respects the BROWSER environment variable:
// - empty string: uses the default browser
Expand Down
24 changes: 21 additions & 3 deletions cmd/auth/token.go
Original file line number Diff line number Diff line change
Expand Up @@ -423,14 +423,27 @@ func runInlineLogin(ctx context.Context, profiler profile.Profiler) (string, *pr

loginArgs.Profile = profileName

// Preserve scopes from the existing profile so the inline login
// uses the same scopes the user previously configured.
var scopesList []string
if existingProfile != nil && existingProfile.Scopes != "" {
for _, s := range strings.Split(existingProfile.Scopes, ",") {
scopesList = append(scopesList, strings.TrimSpace(s))
}
}

oauthArgument, err := loginArgs.ToOAuthArgument()
if err != nil {
return "", nil, err
}
persistentAuth, err := u2m.NewPersistentAuth(ctx,
persistentAuthOpts := []u2m.PersistentAuthOption{
u2m.WithOAuthArgument(oauthArgument),
u2m.WithBrowser(openURLSuppressingStderr),
)
}
if len(scopesList) > 0 {
persistentAuthOpts = append(persistentAuthOpts, u2m.WithScopes(scopesList))
}
persistentAuth, err := u2m.NewPersistentAuth(ctx, persistentAuthOpts...)
if err != nil {
return "", nil, err
}
Expand All @@ -443,6 +456,10 @@ func runInlineLogin(ctx context.Context, profiler profile.Profiler) (string, *pr
return "", nil, err
}

clearKeys := oauthLoginClearKeys()
if !loginArgs.IsUnifiedHost {
clearKeys = append(clearKeys, "experimental_is_unified_host")
}
err = databrickscfg.SaveToProfile(ctx, &config.Config{
Profile: profileName,
Host: loginArgs.Host,
Expand All @@ -451,7 +468,8 @@ func runInlineLogin(ctx context.Context, profiler profile.Profiler) (string, *pr
WorkspaceID: loginArgs.WorkspaceID,
Experimental_IsUnifiedHost: loginArgs.IsUnifiedHost,
ConfigFile: os.Getenv("DATABRICKS_CONFIG_FILE"),
})
Scopes: scopesList,
}, clearKeys...)
if err != nil {
return "", nil, err
}
Expand Down
27 changes: 25 additions & 2 deletions cmd/configure/configure.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,15 @@ import (
"github.com/spf13/cobra"
)

// patConfigureExtraClearKeys lists non-credential profile keys that should also
// be cleared when saving a PAT-based profile. Auth credential keys are derived
// dynamically from config.ConfigAttributes via databrickscfg.AuthCredentialKeys();
// these extras are OAuth-only metadata that would otherwise remain sticky in
// the profile after switching to PAT auth.
var patConfigureExtraClearKeys = []string{
	"auth_type",           // selects the auth method; stale once a PAT is saved
	"scopes",              // OAuth scopes; meaningless for PAT auth
	"databricks_cli_path", // relevant for OAuth only (see comment at the call site)
}

func configureInteractive(cmd *cobra.Command, flags *configureFlags, cfg *config.Config) error {
ctx := cmd.Context()

Expand Down Expand Up @@ -141,14 +150,28 @@ The host must be specified with the --host flag or the DATABRICKS_HOST environme
// This is relevant for OAuth only.
cfg.DatabricksCliPath = ""

// Save profile to config file.
// Save profile to config file. PAT-based configure clears all
// non-PAT auth credentials and OAuth metadata to prevent
// multi-auth conflicts in the profile.
clearKeys := append(databrickscfg.AuthCredentialKeys(), patConfigureExtraClearKeys...)

// Cluster and serverless are mutually exclusive. Clear serverless
// when a cluster is being set (via flag or env var).
if cfg.ClusterID != "" {
clearKeys = append(clearKeys, "serverless_compute_id")
}

// Clear stale unified-host metadata — PAT profiles don't use it,
// and leaving it can change HostType() routing.
clearKeys = append(clearKeys, "experimental_is_unified_host")

return databrickscfg.SaveToProfile(ctx, &config.Config{
Profile: cfg.Profile,
Host: cfg.Host,
Token: cfg.Token,
ClusterID: cfg.ClusterID,
ConfigFile: cfg.ConfigFile,
})
}, clearKeys...)
}

return cmd
Expand Down
Loading