Skip to content

Commit 610805e

Browse files
committed
HDFS-15052. WebHDFS getTrashRoot leads to OOM due to FileSystem object creation. (#1758)
(cherry picked from commit 2338d25) Conflicts: hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
1 parent b52f201 commit 610805e

File tree

4 files changed

+123
-15
lines changed

4 files changed

+123
-15
lines changed

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java

Lines changed: 28 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@
3535
import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
3636
import org.apache.hadoop.hdfs.protocol.DatanodeID;
3737
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
38+
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
3839
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
3940
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
4041
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
@@ -1003,7 +1004,7 @@ public static Path makePathFromFileId(long fileId) {
10031004
* @param ugi {@link UserGroupInformation} of current user.
10041005
* @return the home directory of current user.
10051006
*/
1006-
public static Path getHomeDirectory(Configuration conf,
1007+
public static String getHomeDirectory(Configuration conf,
10071008
UserGroupInformation ugi) {
10081009
String userHomePrefix = HdfsClientConfigKeys
10091010
.DFS_USER_HOME_DIR_PREFIX_DEFAULT;
@@ -1012,6 +1013,31 @@ public static Path getHomeDirectory(Configuration conf,
10121013
HdfsClientConfigKeys.DFS_USER_HOME_DIR_PREFIX_KEY,
10131014
HdfsClientConfigKeys.DFS_USER_HOME_DIR_PREFIX_DEFAULT);
10141015
}
1015-
return new Path(userHomePrefix + "/" + ugi.getShortUserName());
1016+
return userHomePrefix + Path.SEPARATOR + ugi.getShortUserName();
1017+
}
1018+
1019+
/**
1020+
* Returns trash root in non-encryption zone.
1021+
* @param conf configuration.
1022+
* @param ugi user of trash owner.
1023+
* @return unqualified path of trash root.
1024+
*/
1025+
public static String getTrashRoot(Configuration conf,
1026+
UserGroupInformation ugi) {
1027+
return getHomeDirectory(conf, ugi)
1028+
+ Path.SEPARATOR + FileSystem.TRASH_PREFIX;
1029+
}
1030+
1031+
/**
1032+
* Returns trash root in encryption zone.
1033+
* @param ez encryption zone.
1034+
* @param ugi user of trash owner.
1035+
* @return unqualified path of trash root.
1036+
*/
1037+
public static String getEZTrashRoot(EncryptionZone ez,
1038+
UserGroupInformation ugi) {
1039+
String ezpath = ez.getPath();
1040+
return (ezpath.equals("/") ? ezpath : ezpath + Path.SEPARATOR)
1041+
+ FileSystem.TRASH_PREFIX + Path.SEPARATOR + ugi.getShortUserName();
10161042
}
10171043
}

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -209,7 +209,8 @@ public void setWorkingDirectory(Path dir) {
209209

210210
@Override
211211
public Path getHomeDirectory() {
212-
return makeQualified(DFSUtilClient.getHomeDirectory(getConf(), dfs.ugi));
212+
return makeQualified(
213+
new Path(DFSUtilClient.getHomeDirectory(getConf(), dfs.ugi)));
213214
}
214215

215216
/**
@@ -3105,8 +3106,7 @@ public Path getTrashRoot(Path path) {
31053106
EncryptionZone ez = dfs.getEZForPath(parentSrc);
31063107
if ((ez != null)) {
31073108
return this.makeQualified(
3108-
new Path(new Path(ez.getPath(), FileSystem.TRASH_PREFIX),
3109-
dfs.ugi.getShortUserName()));
3109+
new Path(DFSUtilClient.getEZTrashRoot(ez, dfs.ugi)));
31103110
}
31113111
} catch (IOException e) {
31123112
DFSClient.LOG.warn("Exception in checking the encryption zone for the " +
@@ -3133,7 +3133,8 @@ public Collection<FileStatus> getTrashRoots(boolean allUsers) {
31333133
// Get EZ Trash roots
31343134
final RemoteIterator<EncryptionZone> it = dfs.listEncryptionZones();
31353135
while (it.hasNext()) {
3136-
Path ezTrashRoot = new Path(it.next().getPath(),
3136+
EncryptionZone ez = it.next();
3137+
Path ezTrashRoot = new Path(ez.getPath(),
31373138
FileSystem.TRASH_PREFIX);
31383139
if (!exists(ezTrashRoot)) {
31393140
continue;
@@ -3145,7 +3146,7 @@ public Collection<FileStatus> getTrashRoots(boolean allUsers) {
31453146
}
31463147
}
31473148
} else {
3148-
Path userTrash = new Path(ezTrashRoot, dfs.ugi.getShortUserName());
3149+
Path userTrash = new Path(DFSUtilClient.getEZTrashRoot(ez, dfs.ugi));
31493150
try {
31503151
ret.add(getFileStatus(userTrash));
31513152
} catch (FileNotFoundException ignored) {

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java

Lines changed: 36 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,6 @@
6363
import org.apache.hadoop.fs.ContentSummary;
6464
import org.apache.hadoop.fs.FileEncryptionInfo;
6565
import org.apache.hadoop.fs.FileStatus;
66-
import org.apache.hadoop.fs.FileSystem;
6766
import org.apache.hadoop.fs.FsServerDefaults;
6867
import org.apache.hadoop.fs.Options;
6968
import org.apache.hadoop.fs.XAttr;
@@ -79,6 +78,7 @@
7978
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
8079
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
8180
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
81+
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
8282
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
8383
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
8484
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
@@ -1199,7 +1199,7 @@ protected Response get(
11991199
return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
12001200
}
12011201
case GETHOMEDIRECTORY: {
1202-
String userHome = DFSUtilClient.getHomeDirectory(conf, ugi).toString();
1202+
String userHome = DFSUtilClient.getHomeDirectory(conf, ugi);
12031203
final String js = JsonUtil.toJsonString("Path", userHome);
12041204
return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
12051205
}
@@ -1240,7 +1240,7 @@ protected Response get(
12401240
return Response.ok().build();
12411241
}
12421242
case GETTRASHROOT: {
1243-
final String trashPath = getTrashRoot(fullpath, conf);
1243+
final String trashPath = getTrashRoot(conf, fullpath);
12441244
final String jsonStr = JsonUtil.toJsonString("Path", trashPath);
12451245
return Response.ok(jsonStr).type(MediaType.APPLICATION_JSON).build();
12461246
}
@@ -1300,11 +1300,39 @@ protected Response get(
13001300
}
13011301
}
13021302

1303-
private static String getTrashRoot(String fullPath,
1304-
Configuration conf) throws IOException {
1305-
FileSystem fs = FileSystem.get(conf != null ? conf : new Configuration());
1306-
return fs.getTrashRoot(
1307-
new org.apache.hadoop.fs.Path(fullPath)).toUri().getPath();
1303+
private String getTrashRoot(Configuration conf, String fullPath)
1304+
throws IOException {
1305+
UserGroupInformation ugi= UserGroupInformation.getCurrentUser();
1306+
String parentSrc = getParent(fullPath);
1307+
EncryptionZone ez = getRpcClientProtocol().getEZForPath(
1308+
parentSrc != null ? parentSrc : fullPath);
1309+
String trashRoot;
1310+
if (ez != null) {
1311+
trashRoot = DFSUtilClient.getEZTrashRoot(ez, ugi);
1312+
} else {
1313+
trashRoot = DFSUtilClient.getTrashRoot(conf, ugi);
1314+
}
1315+
return trashRoot;
1316+
}
1317+
1318+
/**
1319+
* Returns the parent of a path in the same way as Path#getParent.
1320+
* @return the parent of a path or null if at root
1321+
*/
1322+
public String getParent(String path) {
1323+
int lastSlash = path.lastIndexOf('/');
1324+
int start = 0;
1325+
if ((path.length() == start) || // empty path
1326+
(lastSlash == start && path.length() == start + 1)) { // at root
1327+
return null;
1328+
}
1329+
String parent;
1330+
if (lastSlash == -1) {
1331+
parent = org.apache.hadoop.fs.Path.CUR_DIR;
1332+
} else {
1333+
parent = path.substring(0, lastSlash == start ? start + 1 : lastSlash);
1334+
}
1335+
return parent;
13081336
}
13091337

13101338
private static DirectoryListing getDirectoryListing(final ClientProtocol cp,

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@
3434
import static org.junit.Assert.fail;
3535

3636
import java.io.EOFException;
37+
import java.io.File;
3738
import java.io.IOException;
3839
import java.io.InputStream;
3940
import java.io.OutputStream;
@@ -48,6 +49,7 @@
4849
import java.security.PrivilegedExceptionAction;
4950
import java.util.Arrays;
5051
import java.util.Collection;
52+
import java.util.EnumSet;
5153
import java.util.Iterator;
5254
import java.util.Map;
5355
import java.util.Random;
@@ -61,11 +63,13 @@
6163
import org.apache.hadoop.fs.BlockLocation;
6264
import org.apache.hadoop.fs.BlockStoragePolicySpi;
6365
import org.apache.hadoop.fs.CommonConfigurationKeys;
66+
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
6467
import org.apache.hadoop.fs.ContentSummary;
6568
import org.apache.hadoop.fs.FSDataInputStream;
6669
import org.apache.hadoop.fs.FSDataOutputStream;
6770
import org.apache.hadoop.fs.FileStatus;
6871
import org.apache.hadoop.fs.FileSystem;
72+
import org.apache.hadoop.fs.FileSystemTestHelper;
6973
import org.apache.hadoop.fs.FsServerDefaults;
7074
import org.apache.hadoop.fs.Path;
7175
import org.apache.hadoop.fs.RemoteIterator;
@@ -84,6 +88,8 @@
8488
import org.apache.hadoop.hdfs.MiniDFSCluster;
8589
import org.apache.hadoop.hdfs.TestDFSClientRetries;
8690
import org.apache.hadoop.hdfs.TestFileCreation;
91+
import org.apache.hadoop.hdfs.client.CreateEncryptionZoneFlag;
92+
import org.apache.hadoop.hdfs.client.HdfsAdmin;
8793
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
8894
import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
8995
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
@@ -1636,6 +1642,53 @@ public void testGetTrashRoot() throws Exception {
16361642
}
16371643
}
16381644

1645+
@Test
1646+
public void testGetEZTrashRoot() throws Exception {
1647+
final Configuration conf = WebHdfsTestUtil.createConf();
1648+
FileSystemTestHelper fsHelper = new FileSystemTestHelper();
1649+
File testRootDir = new File(fsHelper.getTestRootDir()).getAbsoluteFile();
1650+
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
1651+
"jceks://file" + new Path(testRootDir.toString(), "test.jks").toUri());
1652+
final MiniDFSCluster cluster =
1653+
new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
1654+
cluster.waitActive();
1655+
DistributedFileSystem dfs = cluster.getFileSystem();
1656+
final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(
1657+
conf, WebHdfsConstants.WEBHDFS_SCHEME);
1658+
HdfsAdmin dfsAdmin = new HdfsAdmin(cluster.getURI(), conf);
1659+
dfs.getClient().setKeyProvider(
1660+
cluster.getNameNode().getNamesystem().getProvider());
1661+
final String testkey = "test_key";
1662+
DFSTestUtil.createKey(testkey, cluster, conf);
1663+
1664+
final Path zone1 = new Path("/zone1");
1665+
dfs.mkdirs(zone1, new FsPermission(700));
1666+
dfsAdmin.createEncryptionZone(zone1, testkey,
1667+
EnumSet.of(CreateEncryptionZoneFlag.PROVISION_TRASH));
1668+
1669+
final Path insideEZ = new Path(zone1, "insideEZ");
1670+
dfs.mkdirs(insideEZ, new FsPermission(700));
1671+
assertEquals(
1672+
dfs.getTrashRoot(insideEZ).toUri().getPath(),
1673+
webhdfs.getTrashRoot(insideEZ).toUri().getPath());
1674+
1675+
final Path outsideEZ = new Path("/outsideEZ");
1676+
dfs.mkdirs(outsideEZ, new FsPermission(755));
1677+
assertEquals(
1678+
dfs.getTrashRoot(outsideEZ).toUri().getPath(),
1679+
webhdfs.getTrashRoot(outsideEZ).toUri().getPath());
1680+
1681+
final Path root = new Path("/");
1682+
assertEquals(
1683+
dfs.getTrashRoot(root).toUri().getPath(),
1684+
webhdfs.getTrashRoot(root).toUri().getPath());
1685+
assertEquals(
1686+
webhdfs.getTrashRoot(root).toUri().getPath(),
1687+
webhdfs.getTrashRoot(zone1).toUri().getPath());
1688+
assertEquals(
1689+
webhdfs.getTrashRoot(outsideEZ).toUri().getPath(),
1690+
webhdfs.getTrashRoot(zone1).toUri().getPath());
1691+
}
16391692

16401693
@Test
16411694
public void testStoragePolicy() throws Exception {

0 commit comments

Comments (0)