diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
index b462a2b63a626..f47e5f4fbfbd6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
@@ -36,14 +36,16 @@
/**
* Support for future IO and the FS Builder subclasses.
- * If methods in here are needed for applications, promote
- * to {@link FutureIO} for public use -with the original
- * method relaying to it. This is to ensure that external
- * filesystem implementations can safely use these methods
+ * All methods in this class have been superseded by those in
+ * {@link FutureIO}.
+ * The methods here are retained but all marked as deprecated.
+ * This is to ensure that any external
+ * filesystem implementations can still use these methods
* without linkage problems surfacing.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
+@Deprecated
public final class FutureIOSupport {
private FutureIOSupport() {
@@ -60,7 +62,8 @@ private FutureIOSupport() {
* @throws IOException if something went wrong
* @throws RuntimeException any nested RTE thrown
*/
- public static <T> T awaitFuture(final Future<T> future)
+ @Deprecated
+ public static <T> T awaitFuture(final Future<T> future)
throws InterruptedIOException, IOException, RuntimeException {
return FutureIO.awaitFuture(future);
}
@@ -78,6 +81,7 @@ public static <T> T awaitFuture(final Future<T> future)
* @throws RuntimeException any nested RTE thrown
* @throws TimeoutException the future timed out.
*/
+ @Deprecated
public static <T> T awaitFuture(final Future<T> future,
final long timeout,
final TimeUnit unit)
@@ -97,6 +101,7 @@ public static <T> T awaitFuture(final Future<T> future,
* any non-Runtime-Exception
* @throws RuntimeException if that is the inner cause.
*/
+ @Deprecated
public static <T> T raiseInnerCause(final ExecutionException e)
throws IOException {
return FutureIO.raiseInnerCause(e);
@@ -113,6 +118,7 @@ public static <T> T raiseInnerCause(final ExecutionException e)
* any non-Runtime-Exception
* @throws RuntimeException if that is the inner cause.
*/
+ @Deprecated
public static <T> T raiseInnerCause(final CompletionException e)
throws IOException {
return FutureIO.raiseInnerCause(e);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/WrappedIOException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/WrappedIOException.java
index d2c999683c6c6..2fbcd33beac6c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/WrappedIOException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/WrappedIOException.java
@@ -27,13 +27,11 @@
import org.apache.hadoop.classification.InterfaceStability;
/**
- * A wrapper for an IOException which
- * {@link FutureIOSupport#raiseInnerCause(ExecutionException)} knows to
- * always extract the exception.
+ * A wrapper for an IOException.
*
* The constructor signature guarantees the cause will be an IOException,
* and as it checks for a null-argument, non-null.
- * @deprecated use the {@code UncheckedIOException}.
+ * @deprecated use the {@code UncheckedIOException} directly.
*/
@Deprecated
@InterfaceAudience.Private
@@ -51,8 +49,4 @@ public WrappedIOException(final IOException cause) {
super(Preconditions.checkNotNull(cause));
}
- @Override
- public synchronized IOException getCause() {
- return (IOException) super.getCause();
- }
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java
index e2cdc0fd41472..32e299b4d45b1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.util.DurationInfo;
-import static org.apache.hadoop.fs.impl.FutureIOSupport.raiseInnerCause;
+import static org.apache.hadoop.util.functional.FutureIO.raiseInnerCause;
/**
* A bridge from Callable to Supplier; catching exceptions
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/FutureIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/FutureIO.java
index 2d9fd9729cdfe..c3fda19d8d73b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/FutureIO.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/FutureIO.java
@@ -90,6 +90,8 @@ public static <T> T awaitFuture(final Future<T> future)
* extracted and rethrown.
*
* @param future future to evaluate
+ * @param timeout timeout to wait
+ * @param unit time unit.
* @param <T> type of the result.
* @return the result, if all went well.
* @throws InterruptedIOException future was interrupted
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
index c94eceabfa33d..57c58ba35a1d1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
@@ -104,7 +104,7 @@ public synchronized void parse(HistoryEventHandler handler)
* Only used for unit tests.
*/
@Private
- public synchronized void parse(EventReader reader, HistoryEventHandler handler)
+ public synchronized void parse(EventReader reader, HistoryEventHandler handler)
throws IOException {
int eventCtr = 0;
HistoryEvent event;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FixedLengthRecordReader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FixedLengthRecordReader.java
index 07264eae62e39..6969f61836fbc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FixedLengthRecordReader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FixedLengthRecordReader.java
@@ -40,6 +40,7 @@
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.util.functional.FutureIO;
+
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
index 2a3650edbf91d..e5369b848830a 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
@@ -630,6 +630,9 @@ private Constants() {
* The default value for this FS.
* Which for S3A, is adaptive.
* Value: {@value}
+ * @deprecated use the {@link Options.OpenFileOptions} value
+ * in code which only needs to be compiled against newer hadoop
+ * releases.
*/
public static final String INPUT_FADV_DEFAULT =
Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_DEFAULT;
@@ -645,6 +648,9 @@ private Constants() {
/**
* Optimized for sequential access.
* Value: {@value}
+ * @deprecated use the {@link Options.OpenFileOptions} value
+ * in code which only needs to be compiled against newer hadoop
+ * releases.
*/
public static final String INPUT_FADV_SEQUENTIAL =
Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL;
@@ -654,6 +660,9 @@ private Constants() {
* The performance of sequential IO may be reduced in exchange for
* more efficient {@code seek()} operations.
* Value: {@value}
+ * @deprecated use the {@link Options.OpenFileOptions} value
+ * in code which only needs to be compiled against newer hadoop
+ * releases.
*/
public static final String INPUT_FADV_RANDOM =
Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_RANDOM;
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
index 9f20d2a49c9f3..15e240f901865 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
@@ -90,6 +90,7 @@
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Globber;
+import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.impl.OpenFileParameters;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.s3a.audit.AuditSpanS3A;
@@ -516,7 +517,8 @@ public void initialize(URI name, Configuration originalConf)
doBucketProbing();
inputPolicy = S3AInputPolicy.getPolicy(
- conf.getTrimmed(INPUT_FADVISE, INPUT_FADV_DEFAULT),
+ conf.getTrimmed(INPUT_FADVISE,
+ Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_DEFAULT),
S3AInputPolicy.Normal);
LOG.debug("Input fadvise policy = {}", inputPolicy);
changeDetectionPolicy = ChangeDetectionPolicy.getPolicy(conf);
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AInputPolicy.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AInputPolicy.java
index 9bf297913cc93..b90d0f2a61605 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AInputPolicy.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AInputPolicy.java
@@ -40,8 +40,8 @@
public enum S3AInputPolicy {
Normal(FS_OPTION_OPENFILE_READ_POLICY_DEFAULT, false, true),
- Random(FS_OPTION_OPENFILE_READ_POLICY_RANDOM, false, false),
- Sequential(FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL, true, false);
+ Random(FS_OPTION_OPENFILE_READ_POLICY_RANDOM, true, false),
+ Sequential(FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL, false, false);
/** Policy name. */
private final String policy;
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Statistic.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Statistic.java
index 7c91d6cdebaec..86cb18076cc6c 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Statistic.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Statistic.java
@@ -306,11 +306,11 @@ public enum Statistic {
TYPE_COUNTER),
STREAM_READ_REMOTE_STREAM_ABORTED(
StreamStatisticNames.STREAM_READ_REMOTE_STREAM_ABORTED,
- "Count/duration of aborting a remote stream during stream IO",
+ "Duration of aborting a remote stream during stream IO",
TYPE_DURATION),
STREAM_READ_REMOTE_STREAM_CLOSED(
StreamStatisticNames.STREAM_READ_REMOTE_STREAM_DRAINED,
- "Count/duration of closing a remote stream during stream IO",
+ "Duration of closing a remote stream during stream IO",
TYPE_DURATION),
STREAM_READ_OPERATIONS_INCOMPLETE(
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java
index f309f967e1343..1d52b0a34ea70 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java
@@ -46,6 +46,7 @@
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
+import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.Constants;
import org.apache.hadoop.fs.s3a.MultipartUtils;
@@ -426,7 +427,8 @@ public int run(String[] args, PrintStream out)
String encryption =
printOption(out, "\tEncryption", Constants.S3_ENCRYPTION_ALGORITHM,
"none");
- printOption(out, "\tInput seek policy", INPUT_FADVISE, INPUT_FADV_DEFAULT);
+ printOption(out, "\tInput seek policy", INPUT_FADVISE,
+ Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_DEFAULT);
printOption(out, "\tChange Detection Source", CHANGE_DETECT_SOURCE,
CHANGE_DETECT_SOURCE_DEFAULT);
printOption(out, "\tChange Detection Mode", CHANGE_DETECT_MODE,
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractSeek.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractSeek.java
index 45811385d606a..dd41583de3fe4 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractSeek.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractSeek.java
@@ -44,11 +44,11 @@
import org.apache.hadoop.security.ssl.DelegatingSSLSocketFactory;
import org.apache.hadoop.util.NativeCodeLoader;
+import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_DEFAULT;
+import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_RANDOM;
+import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL;
import static org.apache.hadoop.util.Preconditions.checkNotNull;
-import static org.apache.hadoop.fs.s3a.Constants.INPUT_FADV_DEFAULT;
import static org.apache.hadoop.fs.s3a.Constants.INPUT_FADVISE;
-import static org.apache.hadoop.fs.s3a.Constants.INPUT_FADV_RANDOM;
-import static org.apache.hadoop.fs.s3a.Constants.INPUT_FADV_SEQUENTIAL;
import static org.apache.hadoop.fs.s3a.Constants.READAHEAD_RANGE;
import static org.apache.hadoop.fs.s3a.Constants.SSL_CHANNEL_MODE;
import static org.apache.hadoop.fs.s3a.S3ATestConstants.FS_S3A_IMPL_DISABLE_CACHE;
@@ -87,9 +87,9 @@ public class ITestS3AContractSeek extends AbstractContractSeekTest {
@Parameterized.Parameters
public static Collection