Skip to content

HADOOP-18666. A whitelist of endpoints to skip Kerberos authentication doesn't work for ResourceManager and Job History Server #5480

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Mar 22, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -497,7 +497,12 @@ public HttpServer2 build() throws IOException {
prefix -> this.conf.get(prefix + "type")
.equals(PseudoAuthenticationHandler.TYPE))
) {
server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
server.initSpnego(
conf,
hostName,
getFilterProperties(conf, authFilterConfigurationPrefixes),
usernameConfKey,
keytabConfKey);
}

for (URI ep : endpoints) {
Expand Down Expand Up @@ -1340,8 +1345,12 @@ public void setThreads(int min, int max) {
}

private void initSpnego(Configuration conf, String hostName,
String usernameConfKey, String keytabConfKey) throws IOException {
Properties authFilterConfigurationPrefixes, String usernameConfKey, String keytabConfKey)
throws IOException {
Map<String, String> params = new HashMap<>();
for (Map.Entry<Object, Object> entry : authFilterConfigurationPrefixes.entrySet()) {
params.put(String.valueOf(entry.getKey()), String.valueOf(entry.getValue()));
}
String principalInConf = conf.get(usernameConfKey);
if (principalInConf != null && !principalInConf.isEmpty()) {
params.put("kerberos.principal", SecurityUtil.getServerPrincipal(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,10 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.KerberosTestUtils;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
Expand Down Expand Up @@ -104,7 +106,9 @@ public static void tearDown() {
*/
@Test
public void testAuthenticationWithProxyUser() throws Exception {
Configuration spengoConf = getSpengoConf(new Configuration());
Configuration spnegoConf = getSpnegoConf(new Configuration());
spnegoConf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
ProxyUserAuthenticationFilterInitializer.class.getName());

//setup logs dir
System.setProperty("hadoop.log.dir", testRootDir.getAbsolutePath());
Expand All @@ -118,15 +122,15 @@ public void testAuthenticationWithProxyUser() throws Exception {
new String[]{"groupC"});

// Make userA impersonate users in groupB
spengoConf.set("hadoop.proxyuser.userA.hosts", "*");
spengoConf.set("hadoop.proxyuser.userA.groups", "groupB");
ProxyUsers.refreshSuperUserGroupsConfiguration(spengoConf);
spnegoConf.set("hadoop.proxyuser.userA.hosts", "*");
spnegoConf.set("hadoop.proxyuser.userA.groups", "groupB");
ProxyUsers.refreshSuperUserGroupsConfiguration(spnegoConf);

HttpServer2 httpServer = null;
try {
// Create http server to test.
httpServer = getCommonBuilder()
.setConf(spengoConf)
.setConf(spnegoConf)
.setACL(new AccessControlList("userA groupA"))
.build();
httpServer.start();
Expand Down Expand Up @@ -191,6 +195,48 @@ public void testAuthenticationWithProxyUser() throws Exception {
}
}

@Test
public void testAuthenticationToAllowList() throws Exception {
  // Endpoints on the Kerberos whitelist must be reachable anonymously,
  // while every other endpoint still demands SPNEGO authentication.
  final String[] anonymousEndpoints = new String[] {"/jmx", "/prom"};
  final String[] protectedEndpoints = new String[] {"/conf", "/stacks", "/logLevel"};

  Configuration conf = getSpnegoConf(new Configuration());
  conf.set(PREFIX + "kerberos.endpoint.whitelist", String.join(",", anonymousEndpoints));
  conf.set(CommonConfigurationKeysPublic.HADOOP_PROMETHEUS_ENABLED, "true");
  conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
      AuthenticationFilterInitializer.class.getName());

  // The server writes request logs; point them at the test root directory.
  System.setProperty("hadoop.log.dir", testRootDir.getAbsolutePath());

  HttpServer2 server = null;
  try {
    // Bring up a security-enabled server using the SPNEGO principal/keytab keys.
    server = getCommonBuilder()
        .setConf(conf)
        .setSecurityEnabled(true)
        .setUsernameConfKey(PREFIX + "kerberos.principal")
        .setKeytabConfKey(PREFIX + "kerberos.keytab")
        .build();
    server.start();

    String baseUrl = "http://" + NetUtils.getHostPortString(server.getConnectorAddress(0));

    // Whitelisted endpoints respond 200 without any Kerberos credentials.
    for (String path : anonymousEndpoints) {
      HttpURLConnection connection =
          (HttpURLConnection) new URL(baseUrl + path).openConnection();
      Assert.assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
    }

    // Non-whitelisted endpoints are rejected with 401 Unauthorized.
    for (String path : protectedEndpoints) {
      HttpURLConnection connection =
          (HttpURLConnection) new URL(baseUrl + path).openConnection();
      Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, connection.getResponseCode());
    }
  } finally {
    if (server != null) {
      server.stop();
    }
  }
}

private AuthenticatedURL.Token getEncryptedAuthToken(Signer signer,
String user) throws Exception {
AuthenticationToken token =
Expand All @@ -209,10 +255,8 @@ private Signer getSignerToEncrypt() throws Exception {
return new Signer(secretProvider);
}

private Configuration getSpengoConf(Configuration conf) {
private Configuration getSpnegoConf(Configuration conf) {
conf = new Configuration();
conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
ProxyUserAuthenticationFilterInitializer.class.getName());
conf.set(PREFIX + "type", "kerberos");
conf.setBoolean(PREFIX + "simple.anonymous.allowed", false);
conf.set(PREFIX + "signature.secret.file",
Expand Down