Commit 8567e8f

Merge remote-tracking branch 'origin/HDFS-17531' into HDFS-17656
# Conflicts:
#   hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
2 parents: d396519 + ea3c4c8

394 files changed: 12661 additions, 2101 deletions

LICENSE-binary

Lines changed: 3 additions & 4 deletions

@@ -244,9 +244,9 @@ com.microsoft.azure:azure-storage:7.0.0
 com.nimbusds:nimbus-jose-jwt:9.37.2
 com.zaxxer:HikariCP:4.0.3
 commons-beanutils:commons-beanutils:1.9.4
-commons-cli:commons-cli:1.5.0
+commons-cli:commons-cli:1.9.0
 commons-codec:commons-codec:1.15
-commons-collections:commons-collections:3.2.2
+org.apache.commons:commons-collections4:4.4
 commons-daemon:commons-daemon:1.0.13
 commons-io:commons-io:2.16.1
 commons-net:commons-net:3.9.0
@@ -297,9 +297,8 @@ io.swagger:swagger-annotations:1.5.4
 javax.inject:javax.inject:1
 net.java.dev.jna:jna:5.2.0
 net.minidev:accessors-smart:1.2
-org.apache.avro:avro:1.9.2
+org.apache.avro:avro:1.11.4
 org.apache.avro:avro:1.11.3
-org.apache.commons:commons-collections4:4.2
 org.apache.commons:commons-compress:1.26.1
 org.apache.commons:commons-configuration2:2.10.1
 org.apache.commons:commons-csv:1.9.0

hadoop-client-modules/hadoop-client-minicluster/pom.xml

Lines changed: 15 additions & 2 deletions

@@ -168,6 +168,10 @@
           <groupId>commons-collections</groupId>
           <artifactId>commons-collections</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.commons</groupId>
+          <artifactId>commons-collections4</artifactId>
+        </exclusion>
         <exclusion>
           <groupId>commons-io</groupId>
           <artifactId>commons-io</artifactId>
@@ -403,7 +407,7 @@
     <!-- Add back in Mockito since the hadoop-hdfs test jar needs it. -->
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-core</artifactId>
+      <artifactId>mockito-inline</artifactId>
       <optional>true</optional>
     </dependency>
     <!-- Add back in the transitive dependencies excluded from hadoop-common in client TODO remove once we have a filter for "is in these artifacts" -->
@@ -761,7 +765,7 @@

     <!-- Mockito tries to include its own unrelocated copy of hamcrest. :( -->
     <filter>
-      <artifact>org.mockito:mockito-core</artifact>
+      <artifact>org.mockito:mockito-inline</artifact>
       <excludes>
         <exclude>asm-license.txt</exclude>
         <exclude>cglib-license.txt</exclude>
@@ -773,6 +777,15 @@
         <exclude>org/objenesis/*.class</exclude>
       </excludes>
     </filter>
+    <!-- Additional filters to exclude unexpected contents -->
+    <filter>
+      <artifact>*:*</artifact>
+      <excludes>
+        <exclude>mockito-extensions/**</exclude>
+        <exclude>win32-x86/**</exclude>
+        <exclude>win32-x86-64/**</exclude>
+      </excludes>
+    </filter>
     <!-- skip grizzly internals we don't need to run. -->
     <filter>
       <artifact>org.glassfish.grizzly:grizzly-http-servlet</artifact>

hadoop-client-modules/hadoop-client-runtime/pom.xml

Lines changed: 4 additions & 3 deletions

@@ -229,8 +229,9 @@
             <exclude>jnamed*</exclude>
             <exclude>lookup*</exclude>
             <exclude>update*</exclude>
-            <exclude>META-INF/versions/21/*</exclude>
-            <exclude>META-INF/versions/21/**/*</exclude>
+            <exclude>META-INF/versions/18/*</exclude>
+            <exclude>META-INF/versions/18/**/*</exclude>
+            <exclude>META-INF/services/java.net.spi.InetAddressResolverProvider</exclude>
           </excludes>
         </filter>
         <filter>
@@ -245,7 +246,7 @@
           <excludes>
             <exclude>META-INF/versions/9/module-info.class</exclude>
             <exclude>META-INF/versions/11/module-info.class</exclude>
-            <exclude>META-INF/versions/21/module-info.class</exclude>
+            <exclude>META-INF/versions/18/module-info.class</exclude>
           </excludes>
         </filter>

hadoop-cloud-storage-project/hadoop-huaweicloud/pom.xml

Lines changed: 15 additions & 3 deletions

@@ -173,15 +173,27 @@
     </dependency>
     <dependency>
       <groupId>org.powermock</groupId>
-      <artifactId>powermock-api-mockito</artifactId>
-      <version>1.7.4</version>
+      <artifactId>powermock-api-mockito2</artifactId>
+      <version>2.0.9</version>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.mockito</groupId>
+          <artifactId>mockito-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.powermock</groupId>
       <artifactId>powermock-module-junit4</artifactId>
-      <version>1.7.4</version>
+      <version>2.0.9</version>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.mockito</groupId>
+          <artifactId>mockito-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
   </dependencies>
</project>

hadoop-common-project/hadoop-auth/pom.xml

Lines changed: 5 additions & 0 deletions

@@ -49,6 +49,7 @@
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-core</artifactId>
+      <version>4.11.0</version>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -182,6 +183,10 @@
           <groupId>log4j</groupId>
           <artifactId>log4j</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>commons-collections</groupId>
+          <artifactId>commons-collections</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>

hadoop-common-project/hadoop-common/pom.xml

Lines changed: 9 additions & 3 deletions

@@ -88,8 +88,8 @@
       <scope>compile</scope>
     </dependency>
     <dependency>
-      <groupId>commons-collections</groupId>
-      <artifactId>commons-collections</artifactId>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-collections4</artifactId>
       <scope>compile</scope>
     </dependency>
     <dependency>
@@ -211,6 +211,12 @@
       <groupId>commons-beanutils</groupId>
      <artifactId>commons-beanutils</artifactId>
       <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>commons-collections</groupId>
+          <artifactId>commons-collections</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
@@ -245,7 +251,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-core</artifactId>
+      <artifactId>mockito-inline</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
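
For context on the mockito-core to mockito-inline swap above: the inline artifact activates Mockito's inline mock maker, which allows mocking final classes and static methods. A minimal sketch, not part of this commit; the test class and fixed UUID are illustrative:

import static org.mockito.Mockito.mockStatic;

import java.util.UUID;
import org.mockito.MockedStatic;

public class InlineMockingSketch {
  public static void main(String[] args) {
    UUID fixed = UUID.fromString("00000000-0000-0000-0000-000000000000");
    // Static mocking only works when the inline mock maker is active,
    // which the mockito-inline artifact enables by default.
    try (MockedStatic<UUID> mocked = mockStatic(UUID.class)) {
      mocked.when(UUID::randomUUID).thenReturn(fixed);
      System.out.println(UUID.randomUUID());  // prints the fixed UUID
    }
  }
}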

hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh

Lines changed: 23 additions & 0 deletions

@@ -1569,6 +1569,28 @@ function hadoop_finalize_hadoop_opts
   hadoop_add_param HADOOP_OPTS hadoop.security.logger "-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER}"
 }

+## @description Finish configuring the JPMS options that are enforced for
+## @description JDK 17 and higher, prior to executing Java.
+## @description Keep this list in sync with hadoop-project/pom.xml extraJavaTestArgs.
+## @audience private
+## @stability evolving
+## @replaceable yes
+function hadoop_finalize_jpms_opts
+{
+  hadoop_add_param HADOOP_OPTS IgnoreUnrecognizedVMOptions "-XX:+IgnoreUnrecognizedVMOptions"
+  hadoop_add_param HADOOP_OPTS open.java.io "--add-opens=java.base/java.io=ALL-UNNAMED"
+  hadoop_add_param HADOOP_OPTS open.java.lang "--add-opens=java.base/java.lang=ALL-UNNAMED"
+  hadoop_add_param HADOOP_OPTS open.java.lang.reflect "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED"
+  hadoop_add_param HADOOP_OPTS open.java.math "--add-opens=java.base/java.math=ALL-UNNAMED"
+  hadoop_add_param HADOOP_OPTS open.java.net "--add-opens=java.base/java.net=ALL-UNNAMED"
+  hadoop_add_param HADOOP_OPTS open.java.text "--add-opens=java.base/java.text=ALL-UNNAMED"
+  hadoop_add_param HADOOP_OPTS open.java.util "--add-opens=java.base/java.util=ALL-UNNAMED"
+  hadoop_add_param HADOOP_OPTS open.java.util.concurrent "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED"
+  hadoop_add_param HADOOP_OPTS open.java.util.zip "--add-opens=java.base/java.util.zip=ALL-UNNAMED"
+  hadoop_add_param HADOOP_OPTS open.sun.security.util "--add-opens=java.base/sun.security.util=ALL-UNNAMED"
+  hadoop_add_param HADOOP_OPTS open.sun.security.x509 "--add-opens=java.base/sun.security.x509=ALL-UNNAMED"
+}
+
 ## @description Finish Java classpath prior to execution
 ## @audience private
 ## @stability evolving
@@ -1597,6 +1619,7 @@ function hadoop_finalize
   hadoop_finalize_libpaths
   hadoop_finalize_hadoop_heap
   hadoop_finalize_hadoop_opts
+  hadoop_finalize_jpms_opts

   hadoop_translate_cygwin_path HADOOP_HOME
   hadoop_translate_cygwin_path HADOOP_CONF_DIR

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

Lines changed: 3 additions & 3 deletions

@@ -84,7 +84,7 @@
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;

-import org.apache.commons.collections.map.UnmodifiableMap;
+import org.apache.commons.collections4.map.UnmodifiableMap;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.classification.VisibleForTesting;
@@ -511,9 +511,9 @@ private static class DeprecationContext {
       }
     }
     this.deprecatedKeyMap =
-        UnmodifiableMap.decorate(newDeprecatedKeyMap);
+        UnmodifiableMap.unmodifiableMap(newDeprecatedKeyMap);
     this.reverseDeprecatedKeyMap =
-        UnmodifiableMap.decorate(newReverseDeprecatedKeyMap);
+        UnmodifiableMap.unmodifiableMap(newReverseDeprecatedKeyMap);
   }

   Map<String, DeprecatedKeyInfo> getDeprecatedKeyMap() {
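
The decorate-to-unmodifiableMap change tracks the commons-collections 3 to 4 API rename visible in this hunk. A minimal standalone sketch, not part of this commit, of the collections4 factory:

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.collections4.map.UnmodifiableMap;

public class UnmodifiableMapDemo {
  public static void main(String[] args) {
    Map<String, String> keys = new HashMap<>();
    keys.put("old.key", "new.key");
    // commons-collections 3.x spelled this UnmodifiableMap.decorate(keys);
    // commons-collections4 renamed the static factory:
    Map<String, String> frozen = UnmodifiableMap.unmodifiableMap(keys);
    try {
      frozen.put("x", "y");  // mutation attempts now fail fast
    } catch (UnsupportedOperationException expected) {
      System.out.println("map is unmodifiable");
    }
  }
}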

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java

Lines changed: 1 addition & 1 deletion

@@ -770,7 +770,7 @@ public FSDataOutputStream createNonRecursive(final Path f,
 abstract class FsOperation {
   boolean run(Path p) throws IOException {
     boolean status = apply(p);
-    if (status) {
+    if (status && !p.isRoot()) {
       Path checkFile = getChecksumFile(p);
       if (fs.exists(checkFile)) {
         apply(checkFile);

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java

Lines changed: 6 additions & 0 deletions

@@ -736,6 +736,12 @@ public class CommonConfigurationKeysPublic {
    */
   public static final String HADOOP_RPC_PROTECTION =
       "hadoop.rpc.protection";
+  public static final String HADOOP_SECURITY_SASL_MECHANISM_KEY
+      = "hadoop.security.sasl.mechanism";
+  public static final String HADOOP_SECURITY_SASL_MECHANISM_DEFAULT
+      = "DIGEST-MD5";
+  public static final String HADOOP_SECURITY_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY
+      = "hadoop.security.sasl.CustomizedCallbackHandler.class";
   /** Class to override Sasl Properties for a connection */
   public static final String HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS =
       "hadoop.security.saslproperties.resolver.class";

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

Lines changed: 20 additions & 1 deletion

@@ -57,7 +57,7 @@
 import java.util.jar.Manifest;
 import java.util.zip.GZIPInputStream;

-import org.apache.commons.collections.map.CaseInsensitiveMap;
+import org.apache.commons.collections4.map.CaseInsensitiveMap;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
@@ -2108,4 +2108,23 @@ public static void maybeIgnoreMissingDirectory(FileSystem fs,
     LOG.info("Ignoring missing directory {}", path);
     LOG.debug("Directory missing", e);
   }
+
+  /**
+   * Return true if the FS implements {@link WithErasureCoding} and
+   * supports the EC_POLICY option in {@link Options.OpenFileOptions}.
+   * A message is logged when the filesystem does not support erasure coding.
+   * @param fs filesystem
+   * @param path path
+   * @return true if the filesystem supports EC
+   * @throws IOException if there is a failure in the hasPathCapability call
+   */
+  public static boolean checkFSSupportsEC(FileSystem fs, Path path) throws IOException {
+    if (fs instanceof WithErasureCoding &&
+        fs.hasPathCapability(path, Options.OpenFileOptions.FS_OPTION_OPENFILE_EC_POLICY)) {
+      return true;
+    }
+    LOG.warn("Filesystem with scheme {} does not support Erasure Coding" +
+        " at path {}", fs.getScheme(), path);
+    return false;
+  }
 }
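
A hedged caller-side sketch of the new helper: probe first, then use the EC API only when the probe succeeds. The path and policy name below are illustrative, not from this commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.WithErasureCoding;

public class EcProbeExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path path = new Path("/data/warehouse");   // hypothetical path
    FileSystem fs = path.getFileSystem(conf);
    if (FileUtil.checkFSSupportsEC(fs, path)) {
      // Safe: checkFSSupportsEC only returns true for WithErasureCoding filesystems.
      ((WithErasureCoding) fs).setErasureCodingPolicy(path, "RS-6-3-1024k");
    }
  }
}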

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java

Lines changed: 3 additions & 1 deletion

@@ -57,6 +57,7 @@ CompletableFuture<UploadHandle> startUpload(Path filePath)
    * It is possible to have parts uploaded in any order (or in parallel).
    * @param uploadId Identifier from {@link #startUpload(Path)}.
    * @param partNumber Index of the part relative to others.
+   * @param isLastPart is the part the last part of the upload?
    * @param filePath Target path for upload (as {@link #startUpload(Path)}).
    * @param inputStream Data for this part. Implementations MUST close this
    *                    stream after reading in the data.
@@ -67,6 +68,7 @@ CompletableFuture<UploadHandle> startUpload(Path filePath)
   CompletableFuture<PartHandle> putPart(
       UploadHandle uploadId,
       int partNumber,
+      boolean isLastPart,
       Path filePath,
       InputStream inputStream,
       long lengthInBytes)
@@ -77,7 +79,7 @@ CompletableFuture<PartHandle> putPart(
    * @param uploadId Identifier from {@link #startUpload(Path)}.
    * @param filePath Target path for upload (as {@link #startUpload(Path)}.
    * @param handles non-empty map of part number to part handle.
-   *        from {@link #putPart(UploadHandle, int, Path, InputStream, long)}.
+   *        from {@link #putPart(UploadHandle, int, boolean, Path, InputStream, long)}.
    * @return unique PathHandle identifier for the uploaded file.
    * @throws IOException IO failure
    */
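
A sketch of the widened putPart contract, assuming an already-constructed MultipartUploader; the part contents are illustrative. The final part is flagged with isLastPart = true.

import java.io.ByteArrayInputStream;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.fs.MultipartUploader;
import org.apache.hadoop.fs.PartHandle;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UploadHandle;

public class PutPartExample {
  // Uploads two parts, flagging the second as the last one.
  static void uploadTwoParts(MultipartUploader uploader, Path dest) throws Exception {
    UploadHandle id = uploader.startUpload(dest).get();
    Map<Integer, PartHandle> handles = new HashMap<>();
    byte[] part1 = {1, 2, 3};
    byte[] part2 = {4, 5, 6};
    handles.put(1, uploader.putPart(id, 1, false, dest,
        new ByteArrayInputStream(part1), part1.length).get());
    handles.put(2, uploader.putPart(id, 2, true, dest,   // isLastPart = true
        new ByteArrayInputStream(part2), part2.length).get());
    uploader.complete(id, dest, handles).get();
  }
}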

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java

Lines changed: 5 additions & 0 deletions

@@ -704,5 +704,10 @@ private OpenFileOptions() {
         FS_OPTION_OPENFILE_READ_POLICY_WHOLE_FILE)
         .collect(Collectors.toSet()));

+    /**
+     * EC policy to be set on the file that needs to be created : {@value}.
+     */
+    public static final String FS_OPTION_OPENFILE_EC_POLICY =
+        FS_OPTION_OPENFILE + "ec.policy";
   }
 }
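
Since FS_OPTION_OPENFILE is the "fs.option.openfile." prefix, the new constant should resolve to the key fs.option.openfile.ec.policy. A hedged sketch of supplying it at create time through the builder API; whether a given filesystem honors the key is an assumption here, and the path and policy name are illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;

public class EcOptionExample {
  public static void main(String[] args) throws Exception {
    Path file = new Path("/data/table/part-0000");  // hypothetical path
    FileSystem fs = file.getFileSystem(new Configuration());
    // opt() marks the setting as optional, so filesystems that do not
    // understand the key can ignore it rather than fail.
    FSDataOutputStream out = fs.createFile(file)
        .opt(Options.OpenFileOptions.FS_OPTION_OPENFILE_EC_POLICY, "RS-6-3-1024k")
        .build();
    out.close();
  }
}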

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java

Lines changed: 4 additions & 3 deletions

@@ -68,7 +68,8 @@
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;

-import static org.apache.hadoop.fs.VectoredReadUtils.validateAndSortRanges;
+import static org.apache.hadoop.fs.VectoredReadUtils.sortRangeList;
+import static org.apache.hadoop.fs.VectoredReadUtils.validateRangeRequest;
 import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;
 import static org.apache.hadoop.fs.statistics.StreamStatisticNames.STREAM_READ_BYTES;
 import static org.apache.hadoop.fs.statistics.StreamStatisticNames.STREAM_READ_EXCEPTIONS;
@@ -320,10 +321,10 @@ public void readVectored(List<? extends FileRange> ranges,
       IntFunction<ByteBuffer> allocate) throws IOException {

     // Validate, but do not pass in a file length as it may change.
-    List<? extends FileRange> sortedRanges = validateAndSortRanges(ranges,
-        Optional.empty());
+    List<? extends FileRange> sortedRanges = sortRangeList(ranges);
     // Set up all of the futures, so that we can use them if things fail
     for(FileRange range: sortedRanges) {
+      validateRangeRequest(range);
       range.setData(new CompletableFuture<>());
     }
     try {
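
For reference, a small sketch, not from this commit, of the vectored-read API whose validation path changed above; the file path is illustrative:

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileRange;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class VectoredReadExample {
  public static void main(String[] args) throws Exception {
    Path file = new Path("file:///tmp/example.bin");  // hypothetical file
    FileSystem fs = file.getFileSystem(new Configuration());
    List<FileRange> ranges = Arrays.asList(
        FileRange.createFileRange(0, 4096),
        FileRange.createFileRange(1 << 20, 4096));
    try (FSDataInputStream in = fs.open(file)) {
      // Each range's future completes once its bytes have been read.
      in.readVectored(ranges, ByteBuffer::allocate);
      for (FileRange r : ranges) {
        ByteBuffer data = r.getData().get();
        System.out.println("read " + data.remaining() + " bytes @" + r.getOffset());
      }
    }
  }
}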
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/WithErasureCoding.java

Lines changed: 50 additions & 0 deletions

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs;

import java.io.IOException;

/**
 * Filesystems that support erasure coding (EC) can implement this interface.
 */
public interface WithErasureCoding {

  /**
   * Get the EC policy name of the given file's FileStatus.
   * If the file is not erasure coded, this shall return null.
   * Callers must first check that the FileStatus comes from an FS that
   * implements this interface.
   * If the call fails due to some error, this shall return null.
   * @param fileStatus object of the file whose ecPolicy needs to be obtained.
   * @return the EC policy name
   */
  String getErasureCodingPolicyName(FileStatus fileStatus);

  /**
   * Set the given ecPolicy on the path.
   * The path and ecPolicyName should be valid (not null/empty), and the
   * implementing FS shall support the supplied ecPolicy.
   * Implementations can throw IOException if these conditions are not met.
   * @param path on which the EC policy needs to be set.
   * @param ecPolicyName the EC policy.
   * @throws IOException if there is an error during the set op.
   */
  void setErasureCodingPolicy(Path path, String ecPolicyName) throws IOException;
}
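
A hedged sketch of reading a policy back through this interface; the helper name is hypothetical:

import java.io.IOException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.WithErasureCoding;

public class EcPolicyReadBack {
  // Returns the EC policy name, or null when the file is not erasure coded
  // or the filesystem does not implement WithErasureCoding.
  static String ecPolicyOf(FileSystem fs, Path path) throws IOException {
    if (!(fs instanceof WithErasureCoding)) {
      return null;
    }
    FileStatus status = fs.getFileStatus(path);
    return ((WithErasureCoding) fs).getErasureCodingPolicyName(status);
  }
}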
