Skip to content

HBASE-27118 Add security headers to Thrift/HTTP server (branch-2) #6153

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Aug 14, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,14 @@
*/
package org.apache.hadoop.hbase.http;

import java.util.EnumSet;
import javax.servlet.DispatcherType;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;

import org.apache.hbase.thirdparty.org.eclipse.jetty.security.ConstraintMapping;
import org.apache.hbase.thirdparty.org.eclipse.jetty.security.ConstraintSecurityHandler;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.FilterHolder;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.ServletContextHandler;
import org.apache.hbase.thirdparty.org.eclipse.jetty.util.security.Constraint;

Expand All @@ -29,6 +33,9 @@
*/
@InterfaceAudience.Private
public final class HttpServerUtil {

public static final String PATH_SPEC_ANY = "/*";

/**
* Add constraints to a Jetty Context to disallow undesirable Http methods.
* @param ctxHandler The context to modify
Expand Down Expand Up @@ -59,6 +66,24 @@ public static void constrainHttpMethods(ServletContextHandler ctxHandler,
ctxHandler.setSecurityHandler(securityHandler);
}

/**
 * Attaches the {@link ClickjackingPreventionFilter} to the given Jetty context so that every
 * response matching {@code pathSpec} carries the anti-clickjacking header, regardless of how
 * the request was dispatched (direct, forward, include, error or async).
 * @param ctxHandler the servlet context to decorate
 * @param conf       configuration used to build the filter's default init parameters
 * @param pathSpec   URL pattern the filter applies to, e.g. {@link #PATH_SPEC_ANY}
 */
public static void addClickjackingPreventionFilter(ServletContextHandler ctxHandler,
  Configuration conf, String pathSpec) {
  FilterHolder clickjackingHolder = new FilterHolder();
  clickjackingHolder.setClassName(ClickjackingPreventionFilter.class.getName());
  clickjackingHolder.setName("clickjackingprevention");
  clickjackingHolder.setInitParameters(ClickjackingPreventionFilter.getDefaultParameters(conf));
  ctxHandler.addFilter(clickjackingHolder, pathSpec, EnumSet.allOf(DispatcherType.class));
}

/**
 * Attaches the {@link SecurityHeadersFilter} to the given Jetty context so that responses
 * matching {@code pathSpec} carry the standard security headers for all dispatcher types.
 * @param ctxHandler the servlet context to decorate
 * @param conf       configuration used to build the filter's default init parameters
 * @param isSecure   whether the server is running over TLS; passed through to the filter's
 *                   defaults (e.g. to decide on HTTPS-only headers)
 * @param pathSpec   URL pattern the filter applies to, e.g. {@link #PATH_SPEC_ANY}
 */
public static void addSecurityHeadersFilter(ServletContextHandler ctxHandler, Configuration conf,
  boolean isSecure, String pathSpec) {
  FilterHolder securityHeadersHolder = new FilterHolder();
  securityHeadersHolder.setClassName(SecurityHeadersFilter.class.getName());
  securityHeadersHolder.setName("securityheaders");
  securityHeadersHolder
    .setInitParameters(SecurityHeadersFilter.getDefaultParameters(conf, isSecure));
  ctxHandler.addFilter(securityHeadersHolder, pathSpec, EnumSet.allOf(DispatcherType.class));
}

// Utility class: private constructor prevents instantiation.
private HttpServerUtil() {
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase.rest;

import static org.apache.hadoop.hbase.http.HttpServerUtil.PATH_SPEC_ANY;

import java.lang.management.ManagementFactory;
import java.net.UnknownHostException;
import java.util.ArrayList;
Expand All @@ -31,10 +33,8 @@
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.http.ClickjackingPreventionFilter;
import org.apache.hadoop.hbase.http.HttpServerUtil;
import org.apache.hadoop.hbase.http.InfoServer;
import org.apache.hadoop.hbase.http.SecurityHeadersFilter;
import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.rest.filter.AuthFilter;
import org.apache.hadoop.hbase.rest.filter.GzipFilter;
Expand Down Expand Up @@ -99,8 +99,6 @@ public class RESTServer implements Constants {
static final String HTTP_HEADER_CACHE_SIZE = "hbase.rest.http.header.cache.size";
static final int DEFAULT_HTTP_HEADER_CACHE_SIZE = Character.MAX_VALUE - 1;

private static final String PATH_SPEC_ANY = "/*";

static final String REST_HTTP_ALLOW_OPTIONS_METHOD = "hbase.rest.http.allow.options.method";
// HTTP OPTIONS method is commonly used in REST APIs for negotiation. So it is enabled by default.
private static boolean REST_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT = true;
Expand Down Expand Up @@ -144,24 +142,6 @@ void addCSRFFilter(ServletContextHandler ctxHandler, Configuration conf) {
}
}

/**
 * Registers the clickjacking-prevention filter on every request path of the REST context,
 * covering all dispatcher types.
 * @param ctxHandler the servlet context to decorate
 * @param conf       configuration used to build the filter's default init parameters
 */
private void addClickjackingPreventionFilter(ServletContextHandler ctxHandler,
  Configuration conf) {
  FilterHolder clickjackingHolder = new FilterHolder();
  clickjackingHolder.setClassName(ClickjackingPreventionFilter.class.getName());
  clickjackingHolder.setName("clickjackingprevention");
  clickjackingHolder.setInitParameters(ClickjackingPreventionFilter.getDefaultParameters(conf));
  ctxHandler.addFilter(clickjackingHolder, PATH_SPEC_ANY, EnumSet.allOf(DispatcherType.class));
}

/**
 * Registers the security-headers filter on every request path of the REST context,
 * covering all dispatcher types.
 * @param ctxHandler the servlet context to decorate
 * @param conf       configuration used to build the filter's default init parameters
 * @param isSecure   whether the server is running over TLS; forwarded to the filter defaults
 */
private void addSecurityHeadersFilter(ServletContextHandler ctxHandler, Configuration conf,
  boolean isSecure) {
  FilterHolder securityHeadersHolder = new FilterHolder();
  securityHeadersHolder.setClassName(SecurityHeadersFilter.class.getName());
  securityHeadersHolder.setName("securityheaders");
  securityHeadersHolder
    .setInitParameters(SecurityHeadersFilter.getDefaultParameters(conf, isSecure));
  ctxHandler.addFilter(securityHeadersHolder, PATH_SPEC_ANY, EnumSet.allOf(DispatcherType.class));
}

// login the server principal (if using secure Hadoop)
private static Pair<FilterHolder, Class<? extends ServletContainer>>
loginServerPrincipal(UserProvider userProvider, Configuration conf) throws Exception {
Expand Down Expand Up @@ -400,8 +380,8 @@ public synchronized void run() throws Exception {
ctxHandler.addFilter(filter, PATH_SPEC_ANY, EnumSet.of(DispatcherType.REQUEST));
}
addCSRFFilter(ctxHandler, conf);
addClickjackingPreventionFilter(ctxHandler, conf);
addSecurityHeadersFilter(ctxHandler, conf, isSecure);
HttpServerUtil.addClickjackingPreventionFilter(ctxHandler, conf, PATH_SPEC_ANY);
HttpServerUtil.addSecurityHeadersFilter(ctxHandler, conf, isSecure, PATH_SPEC_ANY);
HttpServerUtil.constrainHttpMethods(ctxHandler, servlet.getConfiguration()
.getBoolean(REST_HTTP_ALLOW_OPTIONS_METHOD, REST_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT));

Expand Down
5 changes: 5 additions & 0 deletions hbase-thrift/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,11 @@
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk18on</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

<build>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.thrift;

import static org.apache.hadoop.hbase.http.HttpServerUtil.PATH_SPEC_ANY;
import static org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_DEAFULT;
import static org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_KEY;
import static org.apache.hadoop.hbase.thrift.Constants.BIND_CONF_KEY;
Expand Down Expand Up @@ -387,9 +388,12 @@ protected void setupHTTPServer() throws IOException {
httpServer = new Server(threadPool);

// Context handler
boolean isSecure = conf.getBoolean(THRIFT_SSL_ENABLED_KEY, false);
ServletContextHandler ctxHandler =
new ServletContextHandler(httpServer, "/", ServletContextHandler.SESSIONS);
ctxHandler.addServlet(new ServletHolder(thriftHttpServlet), "/*");
HttpServerUtil.addClickjackingPreventionFilter(ctxHandler, conf, PATH_SPEC_ANY);
HttpServerUtil.addSecurityHeadersFilter(ctxHandler, conf, isSecure, PATH_SPEC_ANY);
ctxHandler.addServlet(new ServletHolder(thriftHttpServlet), PATH_SPEC_ANY);
HttpServerUtil.constrainHttpMethods(ctxHandler,
conf.getBoolean(THRIFT_HTTP_ALLOW_OPTIONS_METHOD, THRIFT_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT));

Expand All @@ -404,7 +408,7 @@ protected void setupHTTPServer() throws IOException {
httpConfig.setSendDateHeader(false);

ServerConnector serverConnector;
if (conf.getBoolean(THRIFT_SSL_ENABLED_KEY, false)) {
if (isSecure) {
HttpConfiguration httpsConfig = new HttpConfiguration(httpConfig);
httpsConfig.addCustomizer(new SecureRequestCustomizer());

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,212 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.thrift;

import static org.apache.hadoop.hbase.thrift.TestThriftServerCmdLine.createBoundServer;
import static org.junit.Assert.assertEquals;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.net.HttpURLConnection;
import java.nio.file.Files;
import java.security.KeyPair;
import java.security.KeyStore;
import java.security.cert.X509Certificate;
import javax.net.ssl.SSLContext;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
import org.apache.hadoop.hbase.util.TableDescriptorChecker;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.ssl.SSLContexts;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TMemoryBuffer;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Category({ ClientTests.class, LargeTests.class })
public class TestThriftHttpServerSSL {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestThriftHttpServerSSL.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestThriftHttpServerSSL.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final String KEY_STORE_PASSWORD = "myKSPassword";
  private static final String TRUST_STORE_PASSWORD = "myTSPassword";

  // Per-test state: rebuilt in setUp() and released in tearDown().
  private File keyDir;
  private HttpClientBuilder httpClientBuilder;
  private ThriftServerRunner tsr;
  private HttpPost httpPost = null;

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.getConfiguration().setBoolean(Constants.USE_HTTP_CONF_KEY, true);
    TEST_UTIL.getConfiguration().setBoolean(TableDescriptorChecker.TABLE_SANITY_CHECKS, false);
    TEST_UTIL.startMiniCluster();
    // ensure that server time increments every time we do an operation, otherwise
    // successive puts having the same timestamp will override each other
    EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
    EnvironmentEdgeManager.reset();
  }

  /**
   * Generates a self-signed server certificate, builds JKS key/trust stores from it, starts a
   * TLS-enabled Thrift HTTP server bound to a free port, and prepares an HTTP client that
   * trusts the generated certificate.
   */
  @Before
  public void setUp() throws Exception {
    initializeAlgorithmId();
    keyDir = initKeystoreDir();
    keyDir.deleteOnExit();
    KeyPair keyPair = KeyStoreTestUtil.generateKeyPair("RSA");

    X509Certificate serverCertificate =
      KeyStoreTestUtil.generateCertificate("CN=localhost, O=server", keyPair, 30, "SHA1withRSA");

    generateTrustStore(serverCertificate);
    generateKeyStore(keyPair, serverCertificate);

    Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
    conf.setBoolean(Constants.THRIFT_SSL_ENABLED_KEY, true);
    conf.set(Constants.THRIFT_SSL_KEYSTORE_STORE_KEY, getKeystoreFilePath());
    conf.set(Constants.THRIFT_SSL_KEYSTORE_PASSWORD_KEY, KEY_STORE_PASSWORD);
    conf.set(Constants.THRIFT_SSL_KEYSTORE_KEYPASSWORD_KEY, KEY_STORE_PASSWORD);

    tsr = createBoundServer(() -> new ThriftServer(conf));
    String url = "https://" + HConstants.LOCALHOST + ":" + tsr.getThriftServer().listenPort;

    // Load the generated trust store so the client accepts the self-signed server cert.
    KeyStore trustStore = KeyStore.getInstance("JKS");
    try (InputStream inputStream =
      new BufferedInputStream(Files.newInputStream(new File(getTruststoreFilePath()).toPath()))) {
      trustStore.load(inputStream, TRUST_STORE_PASSWORD.toCharArray());
    }

    httpClientBuilder = HttpClients.custom();
    SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(trustStore, null).build();
    httpClientBuilder.setSSLContext(sslcontext);

    httpPost = new HttpPost(url);
    httpPost.setHeader("Content-Type", "application/x-thrift");
    httpPost.setHeader("Accept", "application/x-thrift");
    httpPost.setHeader("User-Agent", "Java/THttpClient/HC");
  }

  @After
  public void tearDown() throws IOException {
    if (httpPost != null) {
      httpPost.releaseConnection();
    }
    if (tsr != null) {
      tsr.close();
    }
  }

  /**
   * Sends a serialized Thrift request over HTTPS and verifies that every expected security
   * header is present on the response with its default value.
   */
  @Test
  public void testSecurityHeaders() throws Exception {
    try (CloseableHttpClient httpClient = httpClientBuilder.build()) {
      TMemoryBuffer memoryBuffer = new TMemoryBuffer(100);
      TProtocol prot = new TBinaryProtocol(memoryBuffer);
      Hbase.Client client = new Hbase.Client(prot);
      client.send_getClusterId();

      httpPost.setEntity(new ByteArrayEntity(memoryBuffer.getArray()));
      // Close the response too: CloseableHttpResponse holds the underlying connection until
      // closed, and leaking it can exhaust the client's connection pool.
      try (CloseableHttpResponse httpResponse = httpClient.execute(httpPost)) {
        assertEquals(HttpURLConnection.HTTP_OK, httpResponse.getStatusLine().getStatusCode());
        assertEquals("DENY", httpResponse.getFirstHeader("X-Frame-Options").getValue());

        assertEquals("nosniff", httpResponse.getFirstHeader("X-Content-Type-Options").getValue());
        assertEquals("1; mode=block", httpResponse.getFirstHeader("X-XSS-Protection").getValue());

        assertEquals("default-src https: data: 'unsafe-inline' 'unsafe-eval'",
          httpResponse.getFirstHeader("Content-Security-Policy").getValue());
        assertEquals("max-age=63072000;includeSubDomains;preload",
          httpResponse.getFirstHeader("Strict-Transport-Security").getValue());
      }
    }
  }

  // Workaround for jdk8 292 bug. See https://github.com/bcgit/bc-java/issues/941
  // Below is a workaround described in above URL. Issue fingered first in comments in
  // HBASE-25920 Support Hadoop 3.3.1
  private static void initializeAlgorithmId() {
    try {
      Class<?> algoId = Class.forName("sun.security.x509.AlgorithmId");
      Method method = algoId.getMethod("get", String.class);
      method.setAccessible(true);
      method.invoke(null, "PBEWithSHA1AndDESede");
    } catch (Exception e) {
      LOG.warn("failed to initialize AlgorithmId", e);
    }
  }

  /** Creates (or reuses) a directory under the data test dir to hold the generated stores. */
  private File initKeystoreDir() {
    String dataTestDir = TEST_UTIL.getDataTestDir().toString();
    // Name the directory after THIS test (the original copied TestThriftHttpServer's name),
    // so it cannot collide with the non-SSL thrift http server test's key directory.
    File keystoreDir =
      new File(dataTestDir, TestThriftHttpServerSSL.class.getSimpleName() + "_keys");
    // Fail fast instead of ignoring mkdirs(): a missing dir would only surface later as an
    // obscure keystore-write error.
    if (!keystoreDir.mkdirs() && !keystoreDir.isDirectory()) {
      throw new IllegalStateException("Failed to create keystore dir " + keystoreDir);
    }
    return keystoreDir;
  }

  /** Writes the server's private key and certificate into a JKS keystore. */
  private void generateKeyStore(KeyPair keyPair, X509Certificate serverCertificate)
    throws Exception {
    String keyStorePath = getKeystoreFilePath();
    KeyStoreTestUtil.createKeyStore(keyStorePath, KEY_STORE_PASSWORD, KEY_STORE_PASSWORD,
      "serverKS", keyPair.getPrivate(), serverCertificate);
  }

  /** Writes the server certificate into a JKS truststore for the client side. */
  private void generateTrustStore(X509Certificate serverCertificate) throws Exception {
    String trustStorePath = getTruststoreFilePath();
    KeyStoreTestUtil.createTrustStore(trustStorePath, TRUST_STORE_PASSWORD, "serverTS",
      serverCertificate);
  }

  private String getKeystoreFilePath() {
    return String.format("%s/serverKS.%s", keyDir.getAbsolutePath(), "jks");
  }

  private String getTruststoreFilePath() {
    return String.format("%s/serverTS.%s", keyDir.getAbsolutePath(), "jks");
  }
}