ApiClient.java
@@ -1,6 +1,7 @@
 package com.databricks.sdk.core;
 
 import com.databricks.sdk.core.error.ApiErrors;
+import com.databricks.sdk.core.error.PrivateLinkInfo;
 import com.databricks.sdk.core.http.HttpClient;
 import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.core.http.Response;
@@ -261,9 +262,6 @@ private Response executeInner(Request in, String path) {
         if (LOG.isDebugEnabled()) {
           LOG.debug(makeLogRecord(in, out));
         }
-        if (out.getStatusCode() < 400) {
-          return out;
-        }
       } catch (IOException e) {
         err = e;
         LOG.debug("Request {} failed", in, e);
@@ -297,7 +295,10 @@
   }
 
   private boolean isRequestSuccessful(Response response, Exception e) {
-    return e == null && response.getStatusCode() >= 200 && response.getStatusCode() < 300;
+    return e == null
+        && response.getStatusCode() >= 200
+        && response.getStatusCode() < 300
+        && !PrivateLinkInfo.isPrivateLinkRedirect(response);
   }
 
   public long getBackoffMillis(Response response, int attemptNumber) {
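
Note on the success check above: a workspace behind Private Link answers a blocked API call by redirecting to a login page that itself returns a 2xx status, so the status code alone looks like success. A minimal sketch of the case this now catches (hostnames are hypothetical, and the Request(method, url) constructor is assumed):

Request req =
    new Request("GET", "https://adb-1234567890.11.azuredatabricks.net/api/2.0/clusters/list");
URL finalUrl =
    new URL(
        "https://adb-1234567890.11.azuredatabricks.net/login.html?error=private-link-validation-error");
// Uses the new Response constructor that records the final URL (see CommonsHttpClient below).
Response resp = new Response(req, finalUrl, 200, "OK", Collections.emptyMap());
PrivateLinkInfo.isPrivateLinkRedirect(resp); // true, so isRequestSuccessful(resp, null) is now false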
DatabricksConfig.java
@@ -531,6 +531,9 @@ public DatabricksConfig setUseSystemPropertiesHttp(Boolean useSystemPropertiesHt
   }
 
   public boolean isAzure() {
+    if (azureWorkspaceResourceId != null) {
+      return true;
+    }
     return this.getDatabricksEnvironment().getCloud() == Cloud.AZURE;
   }
 
@@ -602,15 +605,7 @@ public DatabricksEnvironment getDatabricksEnvironment() {
       return this.databricksEnvironment;
     }
 
-    if (this.host != null) {
-      for (DatabricksEnvironment env : DatabricksEnvironment.ALL_ENVIRONMENTS) {
-        if (this.host.endsWith(env.getDnsZone())) {
-          return env;
-        }
-      }
-    }
-
-    if (this.azureWorkspaceResourceId != null) {
+    if (this.host == null && this.azureWorkspaceResourceId != null) {
       String azureEnv = "PUBLIC";
       if (this.azureEnvironment != null) {
         azureEnv = this.azureEnvironment;
@@ -629,7 +624,7 @@ public DatabricksEnvironment getDatabricksEnvironment() {
       }
     }
 
-    return DatabricksEnvironment.DEFAULT_ENVIRONMENT;
+    return DatabricksEnvironment.getEnvironmentFromHostname(this.host);
   }
 
   public DatabricksConfig newWithWorkspaceHost(String host) {
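
The isAzure() short-circuit matters because hostname-based environment resolution now happens last: a config holding only an Azure workspace resource ID, with no host resolved yet, must still report Azure. A sketch, assuming the standard setter for the azure_workspace_resource_id attribute and an illustrative resource ID:

DatabricksConfig cfg =
    new DatabricksConfig()
        .setAzureWorkspaceResourceId(
            "/subscriptions/.../resourceGroups/my-rg/providers/Microsoft.Databricks/workspaces/my-ws");
cfg.isAzure(); // true even before any host is known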
DatabricksEnvironment.java
@@ -79,4 +79,16 @@ public String getDeploymentUrl(String name) {
           new DatabricksEnvironment(Cloud.GCP, ".dev.gcp.databricks.com"),
           new DatabricksEnvironment(Cloud.GCP, ".staging.gcp.databricks.com"),
           new DatabricksEnvironment(Cloud.GCP, ".gcp.databricks.com"));
+
+  public static DatabricksEnvironment getEnvironmentFromHostname(String hostname) {
+    if (hostname == null) {
+      return DEFAULT_ENVIRONMENT;
+    }
+    for (DatabricksEnvironment env : ALL_ENVIRONMENTS) {
+      if (hostname.endsWith(env.getDnsZone())) {
+        return env;
+      }
+    }
+    return DEFAULT_ENVIRONMENT;
+  }
 }
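
This extracts the host-to-environment lookup that previously lived inline in DatabricksConfig.getDatabricksEnvironment(), so the private-link error path can reuse it to pick cloud-specific messaging. A usage sketch, assuming the Azure public DNS zone ".azuredatabricks.net" appears in ALL_ENVIRONMENTS alongside the GCP zones shown above:

DatabricksEnvironment env =
    DatabricksEnvironment.getEnvironmentFromHostname("adb-1234567890.11.azuredatabricks.net");
env.getCloud(); // Cloud.AZURE, matched by DNS-zone suffix
DatabricksEnvironment.getEnvironmentFromHostname(null); // falls back to DEFAULT_ENVIRONMENT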
CommonsHttpClient.java
@@ -12,13 +12,18 @@
 import com.databricks.sdk.core.utils.ProxyUtils;
 import java.io.IOException;
 import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
 import org.apache.commons.io.IOUtils;
 import org.apache.http.HttpEntity;
+import org.apache.http.HttpHost;
 import org.apache.http.NameValuePair;
 import org.apache.http.StatusLine;
 import org.apache.http.client.config.RequestConfig;
@@ -28,6 +33,8 @@
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.apache.http.protocol.BasicHttpContext;
+import org.apache.http.protocol.HttpContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -90,11 +97,32 @@ public Response execute(Request in) throws IOException {
       request.getParams().setParameter("http.protocol.handle-redirects", false);
     }
     in.getHeaders().forEach(request::setHeader);
-    CloseableHttpResponse response = hc.execute(request);
-    return computeResponse(in, response);
+    HttpContext context = new BasicHttpContext();
+    CloseableHttpResponse response = hc.execute(request, context);
+    return computeResponse(in, context, response);
   }
 
-  private Response computeResponse(Request in, CloseableHttpResponse response) throws IOException {
+  private URL getTargetUrl(HttpContext context) {
+    try {
+      HttpHost targetHost = (HttpHost) context.getAttribute("http.target_host");
+      HttpUriRequest request = (HttpUriRequest) context.getAttribute("http.request");
+      URI uri =
+          new URI(
+              targetHost.getSchemeName(),
+              null,
+              targetHost.getHostName(),
+              targetHost.getPort(),
+              request.getURI().getPath(),
+              request.getURI().getQuery(),
+              request.getURI().getFragment());
+      return uri.toURL();
+    } catch (MalformedURLException | URISyntaxException e) {
+      throw new DatabricksException("Unable to get target URL", e);
+    }
+  }
+
+  private Response computeResponse(Request in, HttpContext context, CloseableHttpResponse response)
+      throws IOException {
     HttpEntity entity = response.getEntity();
     StatusLine statusLine = response.getStatusLine();
     Map<String, List<String>> hs =
@@ -103,9 +131,10 @@
                 Collectors.groupingBy(
                     NameValuePair::getName,
                     Collectors.mapping(NameValuePair::getValue, Collectors.toList())));
+    URL url = getTargetUrl(context);
     if (entity == null) {
       response.close();
-      return new Response(in, statusLine.getStatusCode(), statusLine.getReasonPhrase(), hs);
+      return new Response(in, url, statusLine.getStatusCode(), statusLine.getReasonPhrase(), hs);
     }
 
     // The Databricks SDK is currently designed to treat all non-application/json responses as
@@ -133,12 +162,13 @@
             }
           });
       return new Response(
-          in, statusLine.getStatusCode(), statusLine.getReasonPhrase(), hs, inputStream);
+          in, url, statusLine.getStatusCode(), statusLine.getReasonPhrase(), hs, inputStream);
     }
 
     try (InputStream inputStream = entity.getContent()) {
       String body = IOUtils.toString(inputStream, StandardCharsets.UTF_8);
-      return new Response(in, statusLine.getStatusCode(), statusLine.getReasonPhrase(), hs, body);
+      return new Response(
+          in, url, statusLine.getStatusCode(), statusLine.getReasonPhrase(), hs, body);
     } finally {
       response.close();
     }
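
Why execute() now threads an HttpContext through: Apache HttpClient follows redirects internally, so without the context the SDK has no record of where a request finally landed. The context's http.target_host and http.request attributes describe the last hop, which getTargetUrl() reassembles into the URL stored on the Response. A sketch of the observable effect, with a hypothetical workspace host:

Response resp =
    httpClient.execute(
        new Request("GET", "https://adb-1234567890.11.azuredatabricks.net/api/2.0/clusters/list"));
// If the workspace blocked the call behind Private Link, the recorded URL is the
// redirect target rather than the requested API path:
resp.getUrl(); // e.g. https://.../login.html?error=private-link-validation-error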
AbstractErrorMapper.java
@@ -42,6 +42,9 @@ public DatabricksError apply(Response resp, ApiErrorBody errorBody) {
     if (statusCodeMapping.containsKey(code)) {
       return statusCodeMapping.get(code).create(errorCode, message, details);
     }
+    if (PrivateLinkInfo.isPrivateLinkRedirect(resp)) {
+      return PrivateLinkInfo.createPrivateLinkValidationError(resp);
+    }
     return new DatabricksError(errorCode, message, code, details);
   }
 
ApiErrors.java
@@ -23,15 +23,17 @@ public static DatabricksError getDatabricksError(Response out, Exception error)
return new DatabricksError("IO_ERROR", 523, error);
} else if (out.getStatusCode() == 429) {
return new DatabricksError("TOO_MANY_REQUESTS", "Current request has to be retried", 429);
} else if (out.getStatusCode() >= 400) {
return readErrorFromResponse(out);
} else {
// The request succeeded; do not retry.
return new DatabricksError(out.getStatusCode());
}

ApiErrorBody errorBody = readErrorFromResponse(out);
return ERROR_MAPPER.apply(out, errorBody);
}

private static DatabricksError readErrorFromResponse(Response response) {
private static ApiErrorBody readErrorFromResponse(Response response) {
// Private link error handling depends purely on the response URL.
if (PrivateLinkInfo.isPrivateLinkRedirect(response)) {
return new ApiErrorBody();
}
ApiErrorBody errorBody = parseApiError(response);

// Condense API v1.2 and SCIM error string and code into the message and errorCode fields of
@@ -52,7 +54,7 @@ private static DatabricksError readErrorFromResponse(Response response) {
     if (errorBody.getErrorDetails() == null) {
       errorBody.setErrorDetails(Collections.emptyList());
     }
-    return ERROR_MAPPER.apply(response, errorBody);
+    return errorBody;
   }
 
   /**
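
Net effect of the two hunks above: every response reaching getDatabricksError is now funneled through ERROR_MAPPER, and readErrorFromResponse only produces the ApiErrorBody, returning an empty body for private-link redirects since that diagnosis depends purely on the URL. A sketch, continuing the hypothetical redirected resp from the ApiClient example:

DatabricksError err = ApiErrors.getDatabricksError(resp, null);
// The mapper detects the redirect and returns a PrivateLinkValidationError, a
// PermissionDenied subclass carrying a cloud-specific troubleshooting message.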
PrivateLinkInfo.java (new file)
@@ -0,0 +1,65 @@
+package com.databricks.sdk.core.error;
+
+import com.databricks.sdk.core.DatabricksEnvironment;
+import com.databricks.sdk.core.http.Response;
+import com.databricks.sdk.core.utils.Cloud;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+public class PrivateLinkInfo {
+  private final String serviceName;
+  private final String endpointName;
+  private final String referencePage;
+
+  static final Map<Cloud, PrivateLinkInfo> PRIVATE_LINK_INFOS = loadPrivateLinkInfos();
+
+  static Map<Cloud, PrivateLinkInfo> loadPrivateLinkInfos() {
+    Map<Cloud, PrivateLinkInfo> privateLinkInfoMap = new HashMap<>();
+    privateLinkInfoMap.put(
+        Cloud.AWS,
+        new PrivateLinkInfo(
+            "AWS PrivateLink",
+            "AWS VPC endpoint",
+            "https://docs.databricks.com/en/security/network/classic/privatelink.html"));
+    privateLinkInfoMap.put(
+        Cloud.AZURE,
+        new PrivateLinkInfo(
+            "Azure Private Link",
+            "Azure Private Link endpoint",
+            "https://learn.microsoft.com/en-us/azure/databricks/security/network/classic/private-link-standard#authentication-troubleshooting"));
+    privateLinkInfoMap.put(
+        Cloud.GCP,
+        new PrivateLinkInfo(
+            "Private Service Connect",
+            "GCP VPC endpoint",
+            "https://docs.gcp.databricks.com/en/security/network/classic/private-service-connect.html"));
+    return privateLinkInfoMap;
+  }
+
+  public PrivateLinkInfo(String serviceName, String endpointName, String referencePage) {
+    this.serviceName = serviceName;
+    this.endpointName = endpointName;
+    this.referencePage = referencePage;
+  }
+
+  public String errorMessage() {
+    return String.format(
+        "The requested workspace has %s enabled and is not accessible from the current network. "
+            + "Ensure that %s is properly configured and that your device has access to the %s. "
+            + "For more information, see %s.",
+        serviceName, serviceName, endpointName, referencePage);
+  }
+
+  public static boolean isPrivateLinkRedirect(Response resp) {
+    return resp.getUrl().getPath().equals("/login.html")
+        && resp.getUrl().getQuery().contains("error=private-link-validation-error");
+  }
+
+  static PrivateLinkValidationError createPrivateLinkValidationError(Response resp) {
+    DatabricksEnvironment env =
+        DatabricksEnvironment.getEnvironmentFromHostname(resp.getUrl().getHost());
+    PrivateLinkInfo info = PRIVATE_LINK_INFOS.get(env.getCloud());
+    return new PrivateLinkValidationError(info.errorMessage(), Collections.emptyList());
+  }
+}
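
A quick sketch of what the detector keys on, using a hypothetical redirect URL:

URL url =
    new URL(
        "https://adb-1234567890.11.azuredatabricks.net/login.html?error=private-link-validation-error");
// Path "/login.html" plus a query containing "error=private-link-validation-error"
// makes isPrivateLinkRedirect(...) true. The hostname's DNS-zone suffix then selects
// the Cloud.AZURE entry of PRIVATE_LINK_INFOS, so errorMessage() names
// "Azure Private Link" and links to the Azure troubleshooting page.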
PrivateLinkValidationError.java (new file)
@@ -0,0 +1,10 @@
+package com.databricks.sdk.core.error;
+
+import com.databricks.sdk.core.error.platform.PermissionDenied;
+import java.util.List;
+
+public class PrivateLinkValidationError extends PermissionDenied {
+  public PrivateLinkValidationError(String message, List<ErrorDetail> details) {
+    super("PRIVATE_LINK_VALIDATION_ERROR", message, details);
+  }
+}