diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml
index 5830bba301186..ff6f99080cafd 100644
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml
@@ -21,6 +21,14 @@
   <includeBaseDirectory>false</includeBaseDirectory>
   <fileSets>
+    <!-- Jar file -->
+    <fileSet>
+      <directory>target</directory>
+      <outputDirectory>/share/hadoop/common</outputDirectory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+      </includes>
+    </fileSet>
${basedir}/src/main/conf
@@ -41,7 +49,7 @@
       <directory>${basedir}/src/main/libexec</directory>
       <outputDirectory>/libexec</outputDirectory>
       <includes>
-        <include>*</include>
+        <include>**/*</include>
       </includes>
       <fileMode>0755</fileMode>
@@ -51,4 +59,19 @@
       <outputDirectory>/share/doc/hadoop/kms</outputDirectory>
     </fileSet>
   </fileSets>
+  <dependencySets>
+    <dependencySet>
+      <useProjectArtifact>false</useProjectArtifact>
+      <outputDirectory>/share/hadoop/common/lib</outputDirectory>
+      <excludes>
+        <!-- Exclude hadoop artifacts; they are found via the HADOOP* env. -->
+        <exclude>org.apache.hadoop:hadoop-common</exclude>
+        <exclude>org.apache.hadoop:hadoop-hdfs</exclude>
+        <!-- Use the slf4j bindings from common to avoid duplicates. -->
+        <exclude>org.slf4j:slf4j-api</exclude>
+        <exclude>org.slf4j:slf4j-log4j12</exclude>
+        <exclude>org.hsqldb:hsqldb</exclude>
+      </excludes>
+    </dependencySet>
+  </dependencySets>
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index 264d9916c00f6..b10fc844bbb34 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -142,7 +142,7 @@ public class AuthenticationFilter implements Filter {
private String cookieDomain;
private String cookiePath;
private boolean isCookiePersistent;
- private boolean isInitializedByTomcat;
+ private boolean destroySecretProvider;
/**
   * Initializes the authentication filter and signer secret provider.
@@ -209,7 +209,7 @@ protected void initializeSecretProvider(FilterConfig filterConfig)
secretProvider = constructSecretProvider(
filterConfig.getServletContext(),
config, false);
- isInitializedByTomcat = true;
+ destroySecretProvider = true;
} catch (Exception ex) {
throw new ServletException(ex);
}
@@ -356,7 +356,7 @@ public void destroy() {
authHandler.destroy();
authHandler = null;
}
- if (secretProvider != null && isInitializedByTomcat) {
+ if (secretProvider != null && destroySecretProvider) {
secretProvider.destroy();
secretProvider = null;
}
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index b6e2b59133bce..3151023441214 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -262,6 +262,39 @@ function hadoop_deprecate_envvar
fi
}
+## @description Declare that `var` is being used and print its value.
+## @audience public
+## @stability stable
+## @replaceable yes
+## @param var
+function hadoop_using_envvar
+{
+ local var=$1
+ local val=${!var}
+
+ if [[ -n "${val}" ]]; then
+ hadoop_debug "${var} = ${val}"
+ fi
+}
+
+## @description Create the directory 'dir'.
+## @audience public
+## @stability stable
+## @replaceable yes
+## @param dir
+function hadoop_mkdir
+{
+ local dir=$1
+
+ if [[ ! -w "${dir}" ]] && [[ ! -d "${dir}" ]]; then
+ hadoop_error "WARNING: ${dir} does not exist. Creating."
+ if ! mkdir -p "${dir}"; then
+ hadoop_error "ERROR: Unable to create ${dir}. Aborting."
+ exit 1
+ fi
+ fi
+}
+
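+## For example, `hadoop_mkdir "${HADOOP_PID_DIR}"` warns and creates the
+## directory when it is missing, and exits with an error if creation fails.
+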
## @description Bootstraps the Hadoop shell environment
## @audience private
## @stability evolving
@@ -1396,14 +1429,7 @@ function hadoop_verify_piddir
hadoop_error "No pid directory defined."
exit 1
fi
- if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
- hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
- mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
- if [[ $? -gt 0 ]]; then
- hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
- exit 1
- fi
- fi
+ hadoop_mkdir "${HADOOP_PID_DIR}"
touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
if [[ $? -gt 0 ]]; then
hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
@@ -1421,14 +1447,7 @@ function hadoop_verify_logdir
hadoop_error "No log directory defined."
exit 1
fi
- if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
- hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
- mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
- if [[ $? -gt 0 ]]; then
- hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
- exit 1
- fi
- fi
+ hadoop_mkdir "${HADOOP_LOG_DIR}"
touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
if [[ $? -gt 0 ]]; then
hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigurationWithLogging.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigurationWithLogging.java
new file mode 100644
index 0000000000000..8a5e05462f8a5
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigurationWithLogging.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.conf;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Logs access to {@link Configuration}.
+ * Sensitive data will be redacted.
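+ *
+ * <p>A minimal usage sketch (the property name is illustrative):
+ * <pre>
+ *   Configuration conf = new ConfigurationWithLogging(new Configuration());
+ *   conf.get("hadoop.tmp.dir");  // logs: Got hadoop.tmp.dir = '...'
+ * </pre>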
+ */
+@InterfaceAudience.Private
+public class ConfigurationWithLogging extends Configuration {
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ConfigurationWithLogging.class);
+
+ private final Logger log;
+ private final ConfigRedactor redactor;
+
+ public ConfigurationWithLogging(Configuration conf) {
+ super(conf);
+ log = LOG;
+ redactor = new ConfigRedactor(conf);
+ }
+
+ /**
+ * @see Configuration#get(String).
+ */
+ @Override
+ public String get(String name) {
+ String value = super.get(name);
+ log.info("Got {} = '{}'", name, redactor.redact(name, value));
+ return value;
+ }
+
+ /**
+ * @see Configuration#get(String, String).
+ */
+ @Override
+ public String get(String name, String defaultValue) {
+ String value = super.get(name, defaultValue);
+ log.info("Got {} = '{}' (default '{}')", name,
+ redactor.redact(name, value), redactor.redact(name, defaultValue));
+ return value;
+ }
+
+ /**
+ * @see Configuration#getBoolean(String, boolean).
+ */
+ @Override
+ public boolean getBoolean(String name, boolean defaultValue) {
+ boolean value = super.getBoolean(name, defaultValue);
+ log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
+ return value;
+ }
+
+ /**
+ * @see Configuration#getFloat(String, float).
+ */
+ @Override
+ public float getFloat(String name, float defaultValue) {
+ float value = super.getFloat(name, defaultValue);
+ log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
+ return value;
+ }
+
+ /**
+ * @see Configuration#getInt(String, int).
+ */
+ @Override
+ public int getInt(String name, int defaultValue) {
+ int value = super.getInt(name, defaultValue);
+ log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
+ return value;
+ }
+
+ /**
+ * @see Configuration#getLong(String, long).
+ */
+ @Override
+ public long getLong(String name, long defaultValue) {
+ long value = super.getLong(name, defaultValue);
+ log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
+ return value;
+ }
+
+ /**
+ * @see Configuration#set(String, String, String).
+ */
+ @Override
+ public void set(String name, String value, String source) {
+ log.info("Set {} to '{}'{}", name, redactor.redact(name, value),
+ source == null ? "" : " from " + source);
+ super.set(name, value, source);
+ }
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index b930f754c55a8..6e215923319ad 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -17,6 +17,10 @@
*/
package org.apache.hadoop.http;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
+
+import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
@@ -45,7 +49,10 @@
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
+import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.sun.jersey.spi.container.servlet.ServletContainer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -54,14 +61,15 @@
import org.apache.hadoop.conf.ConfServlet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.security.AuthenticationFilterInitializer;
-import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import org.apache.hadoop.jmx.JMXJsonServlet;
import org.apache.hadoop.log.LogLevel;
+import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Shell;
import org.eclipse.jetty.http.HttpVersion;
@@ -90,16 +98,9 @@
import org.eclipse.jetty.servlet.ServletMapping;
import org.eclipse.jetty.util.ArrayUtil;
import org.eclipse.jetty.util.MultiException;
-import org.eclipse.jetty.webapp.WebAppContext;
-import org.eclipse.jetty.util.thread.QueuedThreadPool;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.sun.jersey.spi.container.servlet.ServletContainer;
import org.eclipse.jetty.util.ssl.SslContextFactory;
-
-import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
-import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.eclipse.jetty.webapp.WebAppContext;
/**
* Create a Jetty embedded server to answer http requests. The primary goal is
@@ -116,9 +117,22 @@
public final class HttpServer2 implements FilterContainer {
public static final Log LOG = LogFactory.getLog(HttpServer2.class);
+ public static final String HTTP_SCHEME = "http";
+ public static final String HTTPS_SCHEME = "https";
+
+ public static final String HTTP_MAX_REQUEST_HEADER_SIZE_KEY =
+ "hadoop.http.max.request.header.size";
+ public static final int HTTP_MAX_REQUEST_HEADER_SIZE_DEFAULT = 65536;
+ public static final String HTTP_MAX_RESPONSE_HEADER_SIZE_KEY =
+ "hadoop.http.max.response.header.size";
+ public static final int HTTP_MAX_RESPONSE_HEADER_SIZE_DEFAULT = 65536;
+ public static final String HTTP_MAX_THREADS_KEY = "hadoop.http.max.threads";
+ public static final String HTTP_TEMP_DIR_KEY = "hadoop.http.temp.dir";
+
static final String FILTER_INITIALIZER_PROPERTY
= "hadoop.http.filter.initializers";
- public static final String HTTP_MAX_THREADS = "hadoop.http.max.threads";
+ @Deprecated
+ public static final String HTTP_MAX_THREADS = HTTP_MAX_THREADS_KEY;
// The ServletContext attribute where the daemon Configuration
// gets stored.
@@ -158,6 +172,7 @@ public static class Builder {
private ArrayList endpoints = Lists.newArrayList();
private String name;
private Configuration conf;
+ private Configuration sslConf;
private String[] pathSpecs;
private AccessControlList adminsAcl;
private boolean securityEnabled = false;
@@ -253,6 +268,15 @@ public Builder setConf(Configuration conf) {
return this;
}
+ /**
+ * Specify the SSL configuration to load. This API provides an alternative
+ * to keyStore/keyPassword/trustStore.
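+     * <p>A typical use, mirroring {@code KMSWebServer} elsewhere in this
+     * change: load the ssl-server.xml resource into a {@link Configuration}
+     * via {@code addResource()} and pass it here; {@link #build()} then reads
+     * the keystore and truststore settings from it for any https endpoint.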
+ */
+ public Builder setSSLConf(Configuration sslCnf) {
+ this.sslConf = sslCnf;
+ return this;
+ }
+
public Builder setPathSpec(String[] pathSpec) {
this.pathSpecs = pathSpec;
return this;
@@ -315,7 +339,45 @@ public Builder setXFrameOption(String option) {
return this;
}
+ /**
+     * A wrapper of {@link Configuration#getPassword(String)}. It returns
+     * <code>String</code> instead of <code>char[]</code> and throws
+     * {@link IOException} when the password is not found.
+ *
+ * @param conf the configuration
+ * @param name the property name
+ * @return the password string
+ */
+ private static String getPassword(Configuration conf, String name)
+ throws IOException {
+ char[] passchars = conf.getPassword(name);
+ if (passchars == null) {
+ throw new IOException("Password " + name + " not found");
+ }
+ return new String(passchars);
+ }
+ /**
+ * Load SSL properties from the SSL configuration.
+ */
+ private void loadSSLConfiguration() throws IOException {
+ if (sslConf == null) {
+ return;
+ }
+ needsClientAuth(sslConf.getBoolean(
+ SSLFactory.SSL_SERVER_NEED_CLIENT_AUTH,
+ SSLFactory.SSL_SERVER_NEED_CLIENT_AUTH_DEFAULT));
+ keyStore(sslConf.get(SSLFactory.SSL_SERVER_KEYSTORE_LOCATION),
+ getPassword(sslConf, SSLFactory.SSL_SERVER_KEYSTORE_PASSWORD),
+ sslConf.get(SSLFactory.SSL_SERVER_KEYSTORE_TYPE,
+ SSLFactory.SSL_SERVER_KEYSTORE_TYPE_DEFAULT));
+ keyPassword(getPassword(sslConf,
+ SSLFactory.SSL_SERVER_KEYSTORE_KEYPASSWORD));
+ trustStore(sslConf.get(SSLFactory.SSL_SERVER_TRUSTSTORE_LOCATION),
+ getPassword(sslConf, SSLFactory.SSL_SERVER_TRUSTSTORE_PASSWORD),
+ sslConf.get(SSLFactory.SSL_SERVER_TRUSTSTORE_TYPE,
+ SSLFactory.SSL_SERVER_TRUSTSTORE_TYPE_DEFAULT));
+ }
public HttpServer2 build() throws IOException {
Preconditions.checkNotNull(name, "name is not set");
@@ -335,15 +397,33 @@ public HttpServer2 build() throws IOException {
server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
}
+ for (URI ep : endpoints) {
+ if (HTTPS_SCHEME.equals(ep.getScheme())) {
+ loadSSLConfiguration();
+ break;
+ }
+ }
+
+ int requestHeaderSize = conf.getInt(
+ HTTP_MAX_REQUEST_HEADER_SIZE_KEY,
+ HTTP_MAX_REQUEST_HEADER_SIZE_DEFAULT);
+ int responseHeaderSize = conf.getInt(
+ HTTP_MAX_RESPONSE_HEADER_SIZE_KEY,
+ HTTP_MAX_RESPONSE_HEADER_SIZE_DEFAULT);
+
+ HttpConfiguration httpConfig = new HttpConfiguration();
+ httpConfig.setRequestHeaderSize(requestHeaderSize);
+ httpConfig.setResponseHeaderSize(responseHeaderSize);
+
for (URI ep : endpoints) {
final ServerConnector connector;
String scheme = ep.getScheme();
- if ("http".equals(scheme)) {
- connector =
- HttpServer2.createDefaultChannelConnector(server.webServer);
- } else if ("https".equals(scheme)) {
- connector = createHttpsChannelConnector(server.webServer);
-
+ if (HTTP_SCHEME.equals(scheme)) {
+ connector = createHttpChannelConnector(server.webServer,
+ httpConfig);
+ } else if (HTTPS_SCHEME.equals(scheme)) {
+ connector = createHttpsChannelConnector(server.webServer,
+ httpConfig);
} else {
throw new HadoopIllegalArgumentException(
"unknown scheme for endpoint:" + ep);
@@ -356,16 +436,20 @@ public HttpServer2 build() throws IOException {
return server;
}
- private ServerConnector createHttpsChannelConnector(Server server) {
+ private ServerConnector createHttpChannelConnector(
+ Server server, HttpConfiguration httpConfig) {
ServerConnector conn = new ServerConnector(server);
- HttpConfiguration httpConfig = new HttpConfiguration();
- httpConfig.setRequestHeaderSize(JettyUtils.HEADER_SIZE);
- httpConfig.setResponseHeaderSize(JettyUtils.HEADER_SIZE);
- httpConfig.setSecureScheme("https");
- httpConfig.addCustomizer(new SecureRequestCustomizer());
ConnectionFactory connFactory = new HttpConnectionFactory(httpConfig);
conn.addConnectionFactory(connFactory);
configureChannelConnector(conn);
+ return conn;
+ }
+
+ private ServerConnector createHttpsChannelConnector(
+ Server server, HttpConfiguration httpConfig) {
+ httpConfig.setSecureScheme(HTTPS_SCHEME);
+ httpConfig.addCustomizer(new SecureRequestCustomizer());
+ ServerConnector conn = createHttpChannelConnector(server, httpConfig);
SslContextFactory sslContextFactory = new SslContextFactory();
sslContextFactory.setNeedClientAuth(needsClientAuth);
@@ -397,7 +481,7 @@ private HttpServer2(final Builder b) throws IOException {
this.webServer = new Server();
this.adminsAcl = b.adminsAcl;
this.handlers = new HandlerCollection();
- this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir);
+ this.webAppContext = createWebAppContext(b, adminsAcl, appDir);
this.xFrameOptionIsEnabled = b.xFrameEnabled;
this.xFrameOption = b.xFrameOption;
@@ -482,8 +566,8 @@ private void addListener(ServerConnector connector) {
listeners.add(connector);
}
- private static WebAppContext createWebAppContext(String name,
- Configuration conf, AccessControlList adminsAcl, final String appDir) {
+ private static WebAppContext createWebAppContext(Builder b,
+ AccessControlList adminsAcl, final String appDir) {
WebAppContext ctx = new WebAppContext();
ctx.setDefaultsDescriptor(null);
ServletHolder holder = new ServletHolder(new DefaultServlet());
@@ -496,10 +580,15 @@ private static WebAppContext createWebAppContext(String name,
holder.setInitParameters(params);
ctx.setWelcomeFiles(new String[] {"index.html"});
ctx.addServlet(holder, "/");
- ctx.setDisplayName(name);
+ ctx.setDisplayName(b.name);
ctx.setContextPath("/");
- ctx.setWar(appDir + "/" + name);
- ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
+ ctx.setWar(appDir + "/" + b.name);
+ String tempDirectory = b.conf.get(HTTP_TEMP_DIR_KEY);
+ if (tempDirectory != null && !tempDirectory.isEmpty()) {
+ ctx.setTempDirectory(new File(tempDirectory));
+ ctx.setAttribute("javax.servlet.context.tempdir", tempDirectory);
+ }
+ ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, b.conf);
ctx.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
addNoCacheFilter(ctx);
return ctx;
@@ -541,18 +630,6 @@ private static void configureChannelConnector(ServerConnector c) {
}
}
- @InterfaceAudience.Private
- public static ServerConnector createDefaultChannelConnector(Server server) {
- ServerConnector conn = new ServerConnector(server);
- HttpConfiguration httpConfig = new HttpConfiguration();
- httpConfig.setRequestHeaderSize(JettyUtils.HEADER_SIZE);
- httpConfig.setResponseHeaderSize(JettyUtils.HEADER_SIZE);
- ConnectionFactory connFactory = new HttpConnectionFactory(httpConfig);
- conn.addConnectionFactory(connFactory);
- configureChannelConnector(conn);
- return conn;
- }
-
/** Get an array of FilterConfiguration specified in the conf */
private static FilterInitializer[] getFilterInitializers(Configuration conf) {
if (conf == null) {
@@ -1056,7 +1133,7 @@ public void stop() throws Exception {
}
try {
- // explicitly destroy the secrete provider
+ // explicitly destroy the secret provider
secretProvider.destroy();
// clear & stop webAppContext attributes to avoid memory leaks.
webAppContext.clearAttributes();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
index 95cba80989482..cda26a56c5ea5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
@@ -60,27 +60,61 @@ public class SSLFactory implements ConnectionConfigurator {
@InterfaceAudience.Private
public static enum Mode { CLIENT, SERVER }
+ public static final String SSL_CLIENT_CONF_KEY = "hadoop.ssl.client.conf";
+ public static final String SSL_CLIENT_CONF_DEFAULT = "ssl-client.xml";
+ public static final String SSL_SERVER_CONF_KEY = "hadoop.ssl.server.conf";
+ public static final String SSL_SERVER_CONF_DEFAULT = "ssl-server.xml";
+
public static final String SSL_REQUIRE_CLIENT_CERT_KEY =
- "hadoop.ssl.require.client.cert";
+ "hadoop.ssl.require.client.cert";
+ public static final boolean SSL_REQUIRE_CLIENT_CERT_DEFAULT = false;
public static final String SSL_HOSTNAME_VERIFIER_KEY =
- "hadoop.ssl.hostname.verifier";
- public static final String SSL_CLIENT_CONF_KEY =
- "hadoop.ssl.client.conf";
- public static final String SSL_SERVER_CONF_KEY =
- "hadoop.ssl.server.conf";
- public static final String SSLCERTIFICATE = IBM_JAVA?"ibmX509":"SunX509";
+ "hadoop.ssl.hostname.verifier";
+ public static final String SSL_ENABLED_PROTOCOLS_KEY =
+ "hadoop.ssl.enabled.protocols";
+ public static final String SSL_ENABLED_PROTOCOLS_DEFAULT =
+ "TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2";
+
+ public static final String SSL_SERVER_NEED_CLIENT_AUTH =
+ "ssl.server.need.client.auth";
+ public static final boolean SSL_SERVER_NEED_CLIENT_AUTH_DEFAULT = false;
+
+ public static final String SSL_SERVER_KEYSTORE_LOCATION =
+ "ssl.server.keystore.location";
+ public static final String SSL_SERVER_KEYSTORE_PASSWORD =
+ "ssl.server.keystore.password";
+ public static final String SSL_SERVER_KEYSTORE_TYPE =
+ "ssl.server.keystore.type";
+ public static final String SSL_SERVER_KEYSTORE_TYPE_DEFAULT = "jks";
+ public static final String SSL_SERVER_KEYSTORE_KEYPASSWORD =
+ "ssl.server.keystore.keypassword";
+
+ public static final String SSL_SERVER_TRUSTSTORE_LOCATION =
+ "ssl.server.truststore.location";
+ public static final String SSL_SERVER_TRUSTSTORE_PASSWORD =
+ "ssl.server.truststore.password";
+ public static final String SSL_SERVER_TRUSTSTORE_TYPE =
+ "ssl.server.truststore.type";
+ public static final String SSL_SERVER_TRUSTSTORE_TYPE_DEFAULT = "jks";
+
+ public static final String SSL_SERVER_EXCLUDE_CIPHER_LIST =
+ "ssl.server.exclude.cipher.list";
- public static final boolean DEFAULT_SSL_REQUIRE_CLIENT_CERT = false;
+ @Deprecated
+ public static final boolean DEFAULT_SSL_REQUIRE_CLIENT_CERT =
+ SSL_REQUIRE_CLIENT_CERT_DEFAULT;
+
+ public static final String SSLCERTIFICATE = IBM_JAVA?"ibmX509":"SunX509";
public static final String KEYSTORES_FACTORY_CLASS_KEY =
"hadoop.ssl.keystores.factory.class";
+ @Deprecated
public static final String SSL_ENABLED_PROTOCOLS =
- "hadoop.ssl.enabled.protocols";
+ SSL_ENABLED_PROTOCOLS_KEY;
+ @Deprecated
public static final String DEFAULT_SSL_ENABLED_PROTOCOLS =
- "TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2";
- public static final String SSL_SERVER_EXCLUDE_CIPHER_LIST =
- "ssl.server.exclude.cipher.list";
+ SSL_ENABLED_PROTOCOLS_DEFAULT;
private Configuration conf;
private Mode mode;
@@ -131,9 +165,11 @@ private Configuration readSSLConfiguration(Mode mode) {
sslConf.setBoolean(SSL_REQUIRE_CLIENT_CERT_KEY, requireClientCert);
String sslConfResource;
if (mode == Mode.CLIENT) {
- sslConfResource = conf.get(SSL_CLIENT_CONF_KEY, "ssl-client.xml");
+ sslConfResource = conf.get(SSL_CLIENT_CONF_KEY,
+ SSL_CLIENT_CONF_DEFAULT);
} else {
- sslConfResource = conf.get(SSL_SERVER_CONF_KEY, "ssl-server.xml");
+ sslConfResource = conf.get(SSL_SERVER_CONF_KEY,
+ SSL_SERVER_CONF_DEFAULT);
}
sslConf.addResource(sslConfResource);
return sslConf;
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md b/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
index 27a858a66329d..8d98e912c2137 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
@@ -207,6 +207,12 @@ NOTE: Some KeyProviders (e.g. org.apache.hadoop.crypto.key.JavaKeyStoreProvider)
NOTE: Some KeyProviders do not directly execute a key deletion (e.g. performs a soft-delete instead, or delay the actual deletion, to prevent mistake). In these cases, one may encounter errors when creating/deleting a key with the same name after deleting it. Please check the underlying KeyProvider for details.
+### `kms`
+
+Usage: `hadoop kms`
+
+Run KMS, the Key Management Server.
+
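+For example, to run KMS as a daemon in the background (a sketch using the
+generic `--daemon` option of the Hadoop shell scripts):
+
+    hadoop --daemon start kms
+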
### `trace`
View and modify Hadoop tracing settings. See the [Tracing Guide](./Tracing.html).
@@ -267,8 +273,6 @@ This command works by sending a HTTP/HTTPS request to the daemon's internal Jett
* node manager
* Timeline server
-However, the command does not support KMS server, because its web interface is based on Tomcat, which does not support the servlet.
-
Files
-----
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_mkdir.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_mkdir.bats
new file mode 100644
index 0000000000000..90a4f1ab4693a
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_mkdir.bats
@@ -0,0 +1,42 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_mkdir (create)" {
+ DIR=${BATS_TMPDIR}/nodir
+ rm -fr ${DIR}
+ run hadoop_mkdir ${DIR}
+ [ "${status}" = 0 ]
+ [ "${output}" = "WARNING: ${DIR} does not exist. Creating." ]
+}
+
+
+@test "hadoop_mkdir (exists)" {
+ DIR=${BATS_TMPDIR}/exists
+ mkdir -p ${DIR}
+ run hadoop_mkdir ${DIR}
+ [ "${status}" = 0 ]
+ [ -z "${output}" ]
+}
+
+
+@test "hadoop_mkdir (failed)" {
+ DIR=${BATS_TMPDIR}/readonly_dir/dir
+ mkdir -p ${BATS_TMPDIR}/readonly_dir
+ chmod a-w ${BATS_TMPDIR}/readonly_dir
+ run hadoop_mkdir ${DIR}
+ [ "${status}" != 0 ]
+}
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_using_envvar.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_using_envvar.bats
new file mode 100644
index 0000000000000..8f8e93730a2d8
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_using_envvar.bats
@@ -0,0 +1,33 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_using_envvar (has value)" {
+ HADOOP_SHELL_SCRIPT_DEBUG=true
+ VAR=value
+ run hadoop_using_envvar VAR
+ [ "${status}" = 0 ]
+ [ "${output}" = "DEBUG: VAR = value" ]
+}
+
+
+@test "hadoop_using_envvar (no value)" {
+ HADOOP_SHELL_SCRIPT_DEBUG=true
+ VAR=
+ run hadoop_using_envvar VAR
+ [ "${status}" = 0 ]
+ [ -z "${output}" ]
+}
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml
index 929936dad3130..f864c03145b22 100644
--- a/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml
@@ -39,7 +39,7 @@
diff --git a/hadoop-common-project/hadoop-kms/pom.xml b/hadoop-common-project/hadoop-kms/pom.xml
index 73b8339052cd8..41c36e8e00500 100644
--- a/hadoop-common-project/hadoop-kms/pom.xml
+++ b/hadoop-common-project/hadoop-kms/pom.xml
@@ -27,20 +27,11 @@
hadoop-kms
3.0.0-alpha2-SNAPSHOT
-  <packaging>war</packaging>
+  <packaging>jar</packaging>
Apache Hadoop KMS
Apache Hadoop KMS
-  <properties>
-    <kms.tomcat.dist.dir>
-      ${project.build.directory}/${project.artifactId}-${project.version}/share/hadoop/kms/tomcat
-    </kms.tomcat.dist.dir>
-    <tomcat.download.url>
-      http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz
-    </tomcat.download.url>
-  </properties>
-
org.apache.hadoop
@@ -80,12 +71,14 @@
     <dependency>
       <groupId>javax.servlet</groupId>
       <artifactId>javax.servlet-api</artifactId>
-      <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-server</artifactId>
-      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-webapp</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -100,14 +93,6 @@
         <exclusion>
           <groupId>commons-httpclient</groupId>
           <artifactId>commons-httpclient</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-compiler</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-runtime</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>javax.servlet</groupId>
           <artifactId>javax.servlet-api</artifactId>
@@ -229,66 +214,21 @@
         <artifactId>maven-antrun-plugin</artifactId>
         <executions>
           <execution>
-            <id>create-web-xmls</id>
-            <phase>generate-test-resources</phase>
+            <id>site</id>
+            <phase>site</phase>
             <goals>
               <goal>run</goal>
             </goals>
           </execution>
         </executions>
       </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-war-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-war</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>war</goal>
-            </goals>
-            <configuration>
-              <archiveClasses>true</archiveClasses>
-              <warName>kms</warName>
-              <webappDirectory>${project.build.directory}/kms</webappDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>prepare-jar</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>jar</goal>
-            </goals>
-            <configuration>
-              <classifier>classes</classifier>
-            </configuration>
-          </execution>
-          <execution>
-            <id>prepare-test-jar</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>test-jar</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>findbugs-maven-plugin</artifactId>
@@ -360,84 +300,6 @@
-
-
- org.apache.maven.plugins
- maven-antrun-plugin
-
-
- dist
-
- run
-
- package
-
-
-
-
-
-
-
-
-
- cd "${project.build.directory}/tomcat.exp"
- gzip -cd ../../downloads/apache-tomcat-${tomcat.version}.tar.gz | tar xf -
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- tar
- package
-
- run
-
-
-
-
-
- cd "${project.build.directory}"
- tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
-
-
-
-
-
-
-
-
-
diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
index e42904d185edd..0528932c2700f 100644
--- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
@@ -18,6 +18,14 @@
# hadoop-env.sh is read prior to this file.
#
+# KMS config directory
+#
+# export KMS_CONFIG=${HADOOP_CONF_DIR}
+
+# KMS log directory
+#
+# export KMS_LOG=${HADOOP_LOG_DIR}
+
# KMS temporary directory
#
# export KMS_TEMP=${HADOOP_HOME}/temp
@@ -26,48 +34,22 @@
#
# export KMS_HTTP_PORT=9600
-# The Admin port used by KMS
-#
-# export KMS_ADMIN_PORT=$((KMS_HTTP_PORT + 1))
-
-# The maximum number of Tomcat handler threads
+# The maximum number of HTTP handler threads
#
# export KMS_MAX_THREADS=1000
-# The maximum size of Tomcat HTTP header
+# The maximum size of HTTP header
#
# export KMS_MAX_HTTP_HEADER_SIZE=65536
+# Whether SSL is enabled
+#
+# export KMS_SSL_ENABLED=false
+
# The location of the SSL keystore if using SSL
#
# export KMS_SSL_KEYSTORE_FILE=${HOME}/.keystore
-#
# The password of the SSL keystore if using SSL
#
-# export KMS_SSL_KEYSTORE_PASS=password
-
-
-##
-## Tomcat specific settings
-##
-#
-# Location of tomcat
-#
-# export KMS_CATALINA_HOME=${HADOOP_HOME}/share/hadoop/kms/tomcat
-
-# Java System properties for KMS should be specified in this variable.
-# The java.library.path and hadoop.home.dir properties are automatically
-# configured. In order to supplement java.library.path,
-# one should add to the JAVA_LIBRARY_PATH env var.
-#
-# export CATALINA_OPTS=
-
-# PID file
-#
-# export CATALINA_PID=${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-kms.pid
-
-# Output file
-#
-# export CATALINA_OUT=${KMS_LOG}/hadoop-${HADOOP_IDENT_STRING}-kms-${HOSTNAME}.out
-
+# export KMS_SSL_KEYSTORE_PASS=password
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties b/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties
index 8e6d909950225..15ff436090597 100644
--- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties
+++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties
@@ -32,7 +32,6 @@ log4j.appender.kms-audit.layout.ConversionPattern=%d{ISO8601} %m%n
log4j.logger.kms-audit=INFO, kms-audit
log4j.additivity.kms-audit=false
-log4j.rootLogger=ALL, kms
-log4j.logger.org.apache.hadoop.conf=ERROR
+log4j.rootLogger=INFO, kms
log4j.logger.org.apache.hadoop=INFO
log4j.logger.com.sun.jersey.server.wadl.generators.WadlGeneratorJAXBGrammarGenerator=OFF
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml b/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml
index d188735227d77..85e71c331d7fe 100644
--- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml
+++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml
@@ -12,172 +12,9 @@
See the License for the specific language governing permissions and
limitations under the License.
-->
-<configuration>
-
-  <!-- KMS Backend KeyProvider -->
-
-  <property>
-    <name>hadoop.kms.key.provider.uri</name>
-    <value>jceks://file@/${user.home}/kms.keystore</value>
-    <description>
-      URI of the backing KeyProvider for the KMS.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.security.keystore.java-keystore-provider.password-file</name>
-    <value>kms.keystore.password</value>
-    <description>
-      If using the JavaKeyStoreProvider, the file name for the keystore password.
-    </description>
-  </property>
-
-  <!-- KMS Cache -->
-
-  <property>
-    <name>hadoop.kms.cache.enable</name>
-    <value>true</value>
-    <description>
-      Whether the KMS will act as a cache for the backing KeyProvider.
-      When the cache is enabled, operations like getKeyVersion, getMetadata,
-      and getCurrentKey will sometimes return cached data without consulting
-      the backing KeyProvider. Cached values are flushed when keys are deleted
-      or modified.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.cache.timeout.ms</name>
-    <value>600000</value>
-    <description>
-      Expiry time for the KMS key version and key metadata cache, in
-      milliseconds. This affects getKeyVersion and getMetadata.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.current.key.cache.timeout.ms</name>
-    <value>30000</value>
-    <description>
-      Expiry time for the KMS current key cache, in milliseconds. This
-      affects getCurrentKey operations.
-    </description>
-  </property>
-
-  <!-- KMS Audit -->
-
-  <property>
-    <name>hadoop.kms.audit.aggregation.window.ms</name>
-    <value>10000</value>
-    <description>
-      Duplicate audit log events within the aggregation window (specified in
-      ms) are quashed to reduce log traffic. A single message for aggregated
-      events is printed at the end of the window, along with a count of the
-      number of aggregated events.
-    </description>
-  </property>
-
-  <!-- KMS Security -->
-
-  <property>
-    <name>hadoop.kms.authentication.type</name>
-    <value>simple</value>
-    <description>
-      Authentication type for the KMS. Can be either "simple"
-      or "kerberos".
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.kerberos.keytab</name>
-    <value>${user.home}/kms.keytab</value>
-    <description>
-      Path to the keytab with credentials for the configured Kerberos principal.
-    </description>
-  </property>
+<configuration>
-
-  <property>
-    <name>hadoop.kms.authentication.kerberos.principal</name>
-    <value>HTTP/localhost</value>
-    <description>
-      The Kerberos principal to use for the HTTP endpoint.
-      The principal must start with 'HTTP/' as per the Kerberos HTTP SPNEGO specification.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.kerberos.name.rules</name>
-    <value>DEFAULT</value>
-    <description>
-      Rules used to resolve Kerberos principal names.
-    </description>
-  </property>
-
-  <!-- Authentication cookie signature source -->
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider</name>
-    <value>random</value>
-    <description>
-      Indicates how the secret to sign the authentication cookies will be
-      stored. Options are 'random' (default), 'string' and 'zookeeper'.
-      If using a setup with multiple KMS instances, 'zookeeper' should be used.
-    </description>
-  </property>
-
-  <!-- Configuration for the 'zookeeper' signer secret provider -->
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.path</name>
-    <value>/hadoop-kms/hadoop-auth-signature-secret</value>
-    <description>
-      The Zookeeper ZNode path where the KMS instances will store and retrieve
-      the secret from.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string</name>
-    <value>#HOSTNAME#:#PORT#,...</value>
-    <description>
-      The Zookeeper connection string, a list of hostnames and port comma
-      separated.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type</name>
-    <value>none</value>
-    <description>
-      The Zookeeper authentication type, 'none' (default) or 'sasl' (Kerberos).
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab</name>
-    <value>/etc/hadoop/conf/kms.keytab</value>
-    <description>
-      The absolute path for the Kerberos keytab with the credentials to
-      connect to Zookeeper.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal</name>
-    <value>kms/#HOSTNAME#</value>
-    <description>
-      The Kerberos service principal used to connect to Zookeeper.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.audit.logger</name>
-    <value>org.apache.hadoop.crypto.key.kms.server.SimpleKMSAuditLogger</value>
-    <description>
-      The audit logger for KMS. It is a comma-separated list of KMSAuditLogger
-      class names. Default is the text-format SimpleKMSAuditLogger only.
-      If this is not configured, default will be used.
-    </description>
-  </property>
-
-</configuration>
+</configuration>
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java
index 600f1e976bf3b..1ef6c4e8f2fca 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java
@@ -32,6 +32,7 @@
public class KMSConfiguration {
public static final String KMS_CONFIG_DIR = "kms.config.dir";
+ public static final String KMS_DEFAULT_XML = "kms-default.xml";
public static final String KMS_SITE_XML = "kms-site.xml";
public static final String KMS_ACLS_XML = "kms-acls.xml";
@@ -42,6 +43,16 @@ public class KMSConfiguration {
public static final String DEFAULT_KEY_ACL_PREFIX = "default.key.acl.";
public static final String WHITELIST_KEY_ACL_PREFIX = "whitelist.key.acl.";
+ // HTTP properties
+ public static final String HTTP_PORT_KEY = "hadoop.kms.http.port";
+ public static final int HTTP_PORT_DEFAULT = 9600;
+ public static final String HTTP_HOST_KEY = "hadoop.kms.http.host";
+ public static final String HTTP_HOST_DEFAULT = "0.0.0.0";
+
+ // SSL properties
+ public static final String SSL_ENABLED_KEY = "hadoop.kms.ssl.enabled";
+ public static final boolean SSL_ENABLED_DEFAULT = false;
+
// Property to set the backing KeyProvider
public static final String KEY_PROVIDER_URI = CONFIG_PREFIX +
"key.provider.uri";
@@ -77,6 +88,11 @@ public class KMSConfiguration {
public static final boolean KEY_AUTHORIZATION_ENABLE_DEFAULT = true;
+ static {
+ Configuration.addDefaultResource(KMS_DEFAULT_XML);
+ Configuration.addDefaultResource(KMS_SITE_XML);
+ }
+
static Configuration getConfiguration(boolean loadHadoopDefaults,
String ... resources) {
Configuration conf = new Configuration(loadHadoopDefaults);
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJMXServlet.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJMXServlet.java
deleted file mode 100644
index 6918015a90eb1..0000000000000
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJMXServlet.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.crypto.key.kms.server;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.jmx.JMXJsonServlet;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import java.io.IOException;
-
-@InterfaceAudience.Private
-public class KMSJMXServlet extends JMXJsonServlet {
-
- @Override
- protected boolean isInstrumentationAccessAllowed(HttpServletRequest request,
- HttpServletResponse response) throws IOException {
- return true;
- }
-}
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java
index 40ae19f4c0871..857139fd59e94 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java
@@ -17,10 +17,17 @@
*/
package org.apache.hadoop.crypto.key.kms.server;
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URL;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+
import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
-
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.CachingKeyProvider;
@@ -34,15 +41,6 @@
import org.apache.log4j.PropertyConfigurator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.slf4j.bridge.SLF4JBridgeHandler;
-
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URL;
@InterfaceAudience.Private
public class KMSWebApp implements ServletContextListener {
@@ -81,11 +79,6 @@ public class KMSWebApp implements ServletContextListener {
private static KMSAudit kmsAudit;
private static KeyProviderCryptoExtension keyProviderCryptoExtension;
- static {
- SLF4JBridgeHandler.removeHandlersForRootLogger();
- SLF4JBridgeHandler.install();
- }
-
private void initLogging(String confDir) {
if (System.getProperty("log4j.configuration") == null) {
System.setProperty("log4j.defaultInitOverride", "true");
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java
new file mode 100644
index 0000000000000..70945cb2c10a5
--- /dev/null
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.crypto.key.kms.server;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URL;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.ConfigurationWithLogging;
+import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * The KMS web server.
+ */
+@InterfaceAudience.Private
+public class KMSWebServer {
+ private static final Logger LOG =
+ LoggerFactory.getLogger(KMSWebServer.class);
+
+ private static final String NAME = "kms";
+ private static final String SERVLET_PATH = "/kms";
+
+ private final HttpServer2 httpServer;
+ private final String scheme;
+
+ KMSWebServer(Configuration cnf) throws Exception {
+ ConfigurationWithLogging conf = new ConfigurationWithLogging(cnf);
+
+ // Add SSL configuration file
+ conf.addResource(conf.get(SSLFactory.SSL_SERVER_CONF_KEY,
+ SSLFactory.SSL_SERVER_CONF_DEFAULT));
+
+ // Override configuration with deprecated environment variables.
+ deprecateEnv("KMS_TEMP", conf, HttpServer2.HTTP_TEMP_DIR_KEY,
+ KMSConfiguration.KMS_SITE_XML);
+ deprecateEnv("KMS_HTTP_PORT", conf,
+ KMSConfiguration.HTTP_PORT_KEY, KMSConfiguration.KMS_SITE_XML);
+ deprecateEnv("KMS_MAX_THREADS", conf,
+ HttpServer2.HTTP_MAX_THREADS_KEY, KMSConfiguration.KMS_SITE_XML);
+ deprecateEnv("KMS_MAX_HTTP_HEADER_SIZE", conf,
+ HttpServer2.HTTP_MAX_REQUEST_HEADER_SIZE_KEY,
+ KMSConfiguration.KMS_SITE_XML);
+ deprecateEnv("KMS_MAX_HTTP_HEADER_SIZE", conf,
+ HttpServer2.HTTP_MAX_RESPONSE_HEADER_SIZE_KEY,
+ KMSConfiguration.KMS_SITE_XML);
+ deprecateEnv("KMS_SSL_ENABLED", conf,
+ KMSConfiguration.SSL_ENABLED_KEY, KMSConfiguration.KMS_SITE_XML);
+ deprecateEnv("KMS_SSL_KEYSTORE_FILE", conf,
+ SSLFactory.SSL_SERVER_KEYSTORE_LOCATION,
+ SSLFactory.SSL_SERVER_CONF_DEFAULT);
+ deprecateEnv("KMS_SSL_KEYSTORE_PASS", conf,
+ SSLFactory.SSL_SERVER_KEYSTORE_PASSWORD,
+ SSLFactory.SSL_SERVER_CONF_DEFAULT);
+
+ boolean sslEnabled = conf.getBoolean(KMSConfiguration.SSL_ENABLED_KEY,
+ KMSConfiguration.SSL_ENABLED_DEFAULT);
+ scheme = sslEnabled ? HttpServer2.HTTPS_SCHEME : HttpServer2.HTTP_SCHEME;
+
+ String host = conf.get(KMSConfiguration.HTTP_HOST_KEY,
+ KMSConfiguration.HTTP_HOST_DEFAULT);
+ int port = conf.getInt(KMSConfiguration.HTTP_PORT_KEY,
+ KMSConfiguration.HTTP_PORT_DEFAULT);
+ URI endpoint = new URI(scheme, null, host, port, null, null, null);
+
+ httpServer = new HttpServer2.Builder()
+ .setName(NAME)
+ .setConf(conf)
+ .setSSLConf(conf)
+ .authFilterConfigurationPrefix(KMSAuthenticationFilter.CONFIG_PREFIX)
+ .addEndpoint(endpoint)
+ .build();
+ }
+
+ /**
+ * Load the deprecated environment variable into the configuration.
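+   * For example, setting {@code KMS_HTTP_PORT} in the environment overrides
+   * {@code hadoop.kms.http.port} here and logs a warning pointing at the
+   * configuration file that should be used instead.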
+ *
+ * @param varName the environment variable name
+ * @param conf the configuration
+ * @param propName the configuration property name
+ * @param confFile the configuration file name
+ */
+ private static void deprecateEnv(String varName, Configuration conf,
+ String propName, String confFile) {
+ String value = System.getenv(varName);
+ if (value == null) {
+ return;
+ }
+ String propValue = conf.get(propName);
+ LOG.warn("Environment variable {} = '{}' is deprecated and overriding"
+ + " property {} = '{}', please set the property in {} instead.",
+ varName, value, propName, propValue, confFile);
+ conf.set(propName, value, "environment variable " + varName);
+ }
+
+ public void start() throws IOException {
+ httpServer.start();
+ }
+
+ public boolean isRunning() {
+ return httpServer.isAlive();
+ }
+
+ public void join() throws InterruptedException {
+ httpServer.join();
+ }
+
+ public void stop() throws Exception {
+ httpServer.stop();
+ }
+
+ public URL getKMSUrl() {
+ InetSocketAddress addr = httpServer.getConnectorAddress(0);
+ if (null == addr) {
+ return null;
+ }
+ try {
+ return new URL(scheme, addr.getHostName(), addr.getPort(),
+ SERVLET_PATH);
+ } catch (MalformedURLException ex) {
+ throw new RuntimeException("It should never happen: " + ex.getMessage(),
+ ex);
+ }
+ }
+
+ public static void main(String[] args) throws Exception {
+ StringUtils.startupShutdownMessage(KMSWebServer.class, args, LOG);
+ Configuration conf = KMSConfiguration.getKMSConf();
+ KMSWebServer kmsWebServer = new KMSWebServer(conf);
+ kmsWebServer.start();
+ kmsWebServer.join();
+ }
+}
diff --git a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh b/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
deleted file mode 100644
index 52dba3882e09e..0000000000000
--- a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-function hadoop_subproject_init
-{
- local this
- local binparent
- local varlist
-
- if [[ -z "${HADOOP_KMS_ENV_PROCESSED}" ]]; then
- if [[ -e "${HADOOP_CONF_DIR}/kms-env.sh" ]]; then
- . "${HADOOP_CONF_DIR}/kms-env.sh"
- export HADOOP_KMS_ENV_PROCESSED=true
- fi
- fi
-
- export HADOOP_CATALINA_PREFIX=kms
-
- export HADOOP_CATALINA_TEMP="${KMS_TEMP:-${HADOOP_HOME}/temp}"
-
- hadoop_deprecate_envvar KMS_CONFIG HADOOP_CONF_DIR
-
- hadoop_deprecate_envvar KMS_LOG HADOOP_LOG_DIR
-
- export HADOOP_CATALINA_CONFIG="${HADOOP_CONF_DIR}"
- export HADOOP_CATALINA_LOG="${HADOOP_LOG_DIR}"
-
- export HADOOP_CATALINA_HTTP_PORT="${KMS_HTTP_PORT:-9600}"
- export HADOOP_CATALINA_ADMIN_PORT="${KMS_ADMIN_PORT:-$((HADOOP_CATALINA_HTTP_PORT+1))}"
- export HADOOP_CATALINA_MAX_THREADS="${KMS_MAX_THREADS:-1000}"
- export HADOOP_CATALINA_MAX_HTTP_HEADER_SIZE="${KMS_MAX_HTTP_HEADER_SIZE:-65536}"
-
- export HADOOP_CATALINA_SSL_KEYSTORE_FILE="${KMS_SSL_KEYSTORE_FILE:-${HOME}/.keystore}"
-
- export CATALINA_BASE="${CATALINA_BASE:-${HADOOP_HOME}/share/hadoop/kms/tomcat}"
- export HADOOP_CATALINA_HOME="${KMS_CATALINA_HOME:-${CATALINA_BASE}}"
-
- export CATALINA_OUT="${CATALINA_OUT:-${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-kms-${HOSTNAME}.out}"
-
- export CATALINA_PID="${CATALINA_PID:-${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-kms.pid}"
-
- if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
- varlist=$(env | egrep '(^KMS|^CATALINA)' | cut -f1 -d= | grep -v _PASS)
- for i in ${varlist}; do
- hadoop_debug "Setting ${i} to ${!i}"
- done
- fi
-}
-
-if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
- [[ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" ]]; then
- . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh"
-elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
- . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
-elif [[ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]]; then
- . "${HADOOP_HOME}/libexec/hadoop-config.sh"
-else
- echo "ERROR: Hadoop common not found." 2>&1
- exit 1
-fi
diff --git a/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh b/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh
new file mode 100755
index 0000000000000..c53071634681e
--- /dev/null
+++ b/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh
@@ -0,0 +1,57 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
+ hadoop_add_subcommand "kms" "run KMS, the Key Management Server"
+fi
+
+## @description Command handler for kms subcommand
+## @audience private
+## @stability stable
+## @replaceable no
+function hadoop_subcommand_kms
+{
+ if [[ -f "${HADOOP_CONF_DIR}/kms-env.sh" ]]; then
+ # shellcheck disable=SC1090
+ . "${HADOOP_CONF_DIR}/kms-env.sh"
+ fi
+
+ hadoop_deprecate_envvar KMS_CONFIG HADOOP_CONF_DIR
+ hadoop_deprecate_envvar KMS_LOG HADOOP_LOG_DIR
+
+ hadoop_using_envvar KMS_HTTP_PORT
+ hadoop_using_envvar KMS_MAX_HTTP_HEADER_SIZE
+ hadoop_using_envvar KMS_MAX_THREADS
+ hadoop_using_envvar KMS_SSL_ENABLED
+ hadoop_using_envvar KMS_SSL_KEYSTORE_FILE
+ hadoop_using_envvar KMS_TEMP
+
+ # shellcheck disable=SC2034
+ HADOOP_SUBCMD_SUPPORTDAEMONIZATION=true
+ # shellcheck disable=SC2034
+ HADOOP_CLASSNAME=org.apache.hadoop.crypto.key.kms.server.KMSWebServer
+
+ hadoop_add_param HADOOP_OPTS "-Dkms.config.dir=" \
+ "-Dkms.config.dir=${HADOOP_CONF_DIR}"
+ hadoop_add_param HADOOP_OPTS "-Dkms.log.dir=" \
+ "-Dkms.log.dir=${HADOOP_LOG_DIR}"
+
+ if [[ "${HADOOP_DAEMON_MODE}" == "default" ]] ||
+ [[ "${HADOOP_DAEMON_MODE}" == "start" ]]; then
+ hadoop_mkdir "${KMS_TEMP:-${HADOOP_HOME}/temp}"
+ fi
+}
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-kms/src/main/resources/kms-default.xml b/hadoop-common-project/hadoop-kms/src/main/resources/kms-default.xml
new file mode 100644
index 0000000000000..2b178b85ac611
--- /dev/null
+++ b/hadoop-common-project/hadoop-kms/src/main/resources/kms-default.xml
@@ -0,0 +1,248 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<configuration>
+
+  <!-- KMS HTTP server -->
+
+  <property>
+    <name>hadoop.kms.http.port</name>
+    <value>9600</value>
+    <description>
+      The HTTP port for KMS REST API.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.http.host</name>
+    <value>0.0.0.0</value>
+    <description>
+      The bind host for KMS REST API.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.ssl.enabled</name>
+    <value>false</value>
+    <description>
+      Whether SSL is enabled. Default is false, i.e. disabled.
+    </description>
+  </property>
+
+  <!-- HTTP properties -->
+
+  <property>
+    <name>hadoop.http.max.threads</name>
+    <value>1000</value>
+    <description>
+      The maximum number of threads.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.http.max.request.header.size</name>
+    <value>65536</value>
+    <description>
+      The maximum HTTP request header size.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.http.max.response.header.size</name>
+    <value>65536</value>
+    <description>
+      The maximum HTTP response header size.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.http.temp.dir</name>
+    <value>${hadoop.tmp.dir}/kms</value>
+    <description>
+      KMS temp directory.
+    </description>
+  </property>
+
+  <!-- KMS Backend KeyProvider -->
+
+  <property>
+    <name>hadoop.kms.key.provider.uri</name>
+    <value>jceks://file@/${user.home}/kms.keystore</value>
+    <description>
+      URI of the backing KeyProvider for the KMS.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.security.keystore.java-keystore-provider.password-file</name>
+    <value></value>
+    <description>
+      If using the JavaKeyStoreProvider, the file name for the keystore password.
+    </description>
+  </property>
+
+  <!-- KMS Cache -->
+
+  <property>
+    <name>hadoop.kms.cache.enable</name>
+    <value>true</value>
+    <description>
+      Whether the KMS will act as a cache for the backing KeyProvider.
+      When the cache is enabled, operations like getKeyVersion, getMetadata,
+      and getCurrentKey will sometimes return cached data without consulting
+      the backing KeyProvider. Cached values are flushed when keys are deleted
+      or modified.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.cache.timeout.ms</name>
+    <value>600000</value>
+    <description>
+      Expiry time for the KMS key version and key metadata cache, in
+      milliseconds. This affects getKeyVersion and getMetadata.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.current.key.cache.timeout.ms</name>
+    <value>30000</value>
+    <description>
+      Expiry time for the KMS current key cache, in milliseconds. This
+      affects getCurrentKey operations.
+    </description>
+  </property>
+
+  <!-- KMS Audit -->
+
+  <property>
+    <name>hadoop.kms.audit.aggregation.window.ms</name>
+    <value>10000</value>
+    <description>
+      Duplicate audit log events within the aggregation window (specified in
+      ms) are quashed to reduce log traffic. A single message for aggregated
+      events is printed at the end of the window, along with a count of the
+      number of aggregated events.
+    </description>
+  </property>
+
+  <!-- KMS Security -->
+
+  <property>
+    <name>hadoop.kms.authentication.type</name>
+    <value>simple</value>
+    <description>
+      Authentication type for the KMS. Can be either 'simple' (default) or
+      'kerberos'.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.kerberos.keytab</name>
+    <value>${user.home}/kms.keytab</value>
+    <description>
+      Path to the keytab with credentials for the configured Kerberos principal.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.kerberos.principal</name>
+    <value>HTTP/localhost</value>
+    <description>
+      The Kerberos principal to use for the HTTP endpoint.
+      The principal must start with 'HTTP/' as per the Kerberos HTTP SPNEGO specification.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.kerberos.name.rules</name>
+    <value>DEFAULT</value>
+    <description>
+      Rules used to resolve Kerberos principal names.
+    </description>
+  </property>
+
+  <!-- Authentication cookie signature source -->
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider</name>
+    <value>random</value>
+    <description>
+      Indicates how the secret to sign the authentication cookies will be
+      stored. Options are 'random' (default), 'string' and 'zookeeper'.
+      If using a setup with multiple KMS instances, 'zookeeper' should be used.
+    </description>
+  </property>
+
+  <!-- Configuration for the 'zookeeper' signer secret provider -->
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.path</name>
+    <value>/hadoop-kms/hadoop-auth-signature-secret</value>
+    <description>
+      The Zookeeper ZNode path where the KMS instances will store and retrieve
+      the secret from.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string</name>
+    <value>#HOSTNAME#:#PORT#,...</value>
+    <description>
+      The Zookeeper connection string, a list of hostnames and port comma
+      separated.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type</name>
+    <value>none</value>
+    <description>
+      The Zookeeper authentication type, 'none' (default) or 'sasl' (Kerberos).
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab</name>
+    <value>/etc/hadoop/conf/kms.keytab</value>
+    <description>
+      The absolute path for the Kerberos keytab with the credentials to
+      connect to Zookeeper.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal</name>
+    <value>kms/#HOSTNAME#</value>
+    <description>
+      The Kerberos service principal used to connect to Zookeeper.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.audit.logger</name>
+    <value>org.apache.hadoop.crypto.key.kms.server.SimpleKMSAuditLogger</value>
+    <description>
+      The audit logger for KMS. It is a comma-separated list of KMSAuditLogger
+      class names. Default is the text-format SimpleKMSAuditLogger only.
+      If this is not configured, default will be used.
+    </description>
+  </property>
+
+</configuration>
diff --git a/hadoop-common-project/hadoop-kms/src/main/webapp/WEB-INF/web.xml b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/kms/WEB-INF/web.xml
similarity index 87%
rename from hadoop-common-project/hadoop-kms/src/main/webapp/WEB-INF/web.xml
rename to hadoop-common-project/hadoop-kms/src/main/resources/webapps/kms/WEB-INF/web.xml
index d081764217e33..1c14d285186cb 100644
--- a/hadoop-common-project/hadoop-kms/src/main/webapp/WEB-INF/web.xml
+++ b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/kms/WEB-INF/web.xml
@@ -40,19 +40,9 @@
     <load-on-startup>1</load-on-startup>
   </servlet>
 
-  <servlet>
-    <servlet-name>jmx-servlet</servlet-name>
-    <servlet-class>org.apache.hadoop.crypto.key.kms.server.KMSJMXServlet</servlet-class>
-  </servlet>
-
   <servlet-mapping>
     <servlet-name>webservices-driver</servlet-name>
-    <url-pattern>/*</url-pattern>
-  </servlet-mapping>
-
-  <servlet-mapping>
-    <servlet-name>jmx-servlet</servlet-name>
-    <url-pattern>/jmx</url-pattern>
+    <url-pattern>/kms/*</url-pattern>
   </servlet-mapping>
diff --git a/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/index.html b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/static/index.html
similarity index 62%
rename from hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/index.html
rename to hadoop-common-project/hadoop-kms/src/main/resources/webapps/static/index.html
index e9e45121b15c5..9925ad93040ba 100644
--- a/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/index.html
+++ b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/static/index.html
@@ -20,8 +20,16 @@
Hadoop KMS