diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 15b529a5e34a8..9be5da86e18f9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -908,6 +908,8 @@ Release 2.7.0 - UNRELEASED HDFS-7929. inotify unable fetch pre-upgrade edit log segments once upgrade starts (Zhe Zhang via Colin P. McCabe) + HDFS-7816. Unable to open webhdfs paths with "+". (wheat9 via kihwal) + BREAKDOWN OF HDFS-7584 SUBTASKS AND RELATED JIRAS HDFS-7720. Quota by Storage Type API, tools and ClientNameNode diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java index f34402f2c210b..0ebf3dcc5224c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hdfs.server.datanode.web.webhdfs; import io.netty.handler.codec.http.QueryStringDecoder; +import org.apache.commons.io.Charsets; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.HAUtil; @@ -39,6 +40,7 @@ import java.io.IOException; import java.net.URI; +import java.nio.charset.Charset; import java.util.List; import java.util.Map; @@ -51,7 +53,8 @@ class ParameterParser { private final Map> params; ParameterParser(QueryStringDecoder decoder, Configuration conf) { - this.path = QueryStringDecoder.decodeComponent(decoder.path().substring(WEBHDFS_PREFIX_LENGTH)); + this.path = decodeComponent(decoder.path().substring + (WEBHDFS_PREFIX_LENGTH), Charsets.UTF_8); this.params = decoder.parameters(); this.conf = conf; } @@ -127,4 +130,78 @@ 
private String param(String key) { List p = params.get(key); return p == null ? null : p.get(0); } + + /** + * The following function behaves exactly the same as netty's + * QueryStringDecoder#decodeComponent except that it + * does not decode the '+' character as space. WebHDFS takes this scheme + * to maintain the backward-compatibility for pre-2.7 releases. + */ + private static String decodeComponent(final String s, final Charset charset) { + if (s == null) { + return ""; + } + final int size = s.length(); + boolean modified = false; + for (int i = 0; i < size; i++) { + final char c = s.charAt(i); + if (c == '%' || c == '+') { + modified = true; + break; + } + } + if (!modified) { + return s; + } + final byte[] buf = new byte[size]; + int pos = 0; // position in `buf'. + for (int i = 0; i < size; i++) { + char c = s.charAt(i); + if (c == '%') { + if (i == size - 1) { + throw new IllegalArgumentException("unterminated escape sequence at" + + " end of string: " + s); + } + c = s.charAt(++i); + if (c == '%') { + buf[pos++] = '%'; // "%%" -> "%" + continue; + } + if (i == size - 1) { + throw new IllegalArgumentException("partial escape sequence at end " + + "of string: " + s); + } + c = decodeHexNibble(c); + final char c2 = decodeHexNibble(s.charAt(++i)); + if (c == Character.MAX_VALUE || c2 == Character.MAX_VALUE) { + throw new IllegalArgumentException( + "invalid escape sequence `%" + s.charAt(i - 1) + s.charAt( + i) + "' at index " + (i - 2) + " of: " + s); + } + c = (char) (c * 16 + c2); + // Fall through. + } + buf[pos++] = (byte) c; + } + return new String(buf, 0, pos, charset); + } + + /** + * Helper to decode half of a hexadecimal number from a string. + * @param c The ASCII character of the hexadecimal number to decode. + * Must be in the range {@code [0-9a-fA-F]}. + * @return The hexadecimal value represented in the ASCII character + * given, or {@link Character#MAX_VALUE} if the character is invalid. 
+ */ + private static char decodeHexNibble(final char c) { + if ('0' <= c && c <= '9') { + return (char) (c - '0'); + } else if ('a' <= c && c <= 'f') { + return (char) (c - 'a' + 10); + } else if ('A' <= c && c <= 'F') { + return (char) (c - 'A' + 10); + } else { + return Character.MAX_VALUE; + } + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/TestParameterParser.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/TestParameterParser.java index 8aee1d8565c1c..217d6b572d9b6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/TestParameterParser.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/TestParameterParser.java @@ -56,13 +56,12 @@ public void testDeserializeHAToken() throws IOException { @Test public void testDecodePath() { - final String SCAPED_PATH = "hdfs-6662/test%25251%26%3Dtest?op=OPEN"; - final String EXPECTED_PATH = "/hdfs-6662/test%251&=test"; + final String ESCAPED_PATH = "/test%25+1%26%3Dtest?op=OPEN&foo=bar"; + final String EXPECTED_PATH = "/test%+1&=test"; - Configuration conf = DFSTestUtil.newHAConfiguration(LOGICAL_NAME); + Configuration conf = new Configuration(); QueryStringDecoder decoder = new QueryStringDecoder( - WebHdfsHandler.WEBHDFS_PREFIX + "/" - + SCAPED_PATH); + WebHdfsHandler.WEBHDFS_PREFIX + ESCAPED_PATH); ParameterParser testParser = new ParameterParser(decoder, conf); Assert.assertEquals(EXPECTED_PATH, testParser.path()); }