diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/FileSystemConfigurations.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/FileSystemConfigurations.java
index 1a0ee626f1834..b3825b4c53ec1 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/FileSystemConfigurations.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/FileSystemConfigurations.java
@@ -59,7 +59,7 @@ public final class FileSystemConfigurations {
   public static final boolean DEFAULT_AZURE_ENABLE_SMALL_WRITE_OPTIMIZATION = false;
   public static final int DEFAULT_READ_BUFFER_SIZE = 4 * ONE_MB; // 4 MB
   public static final boolean DEFAULT_READ_SMALL_FILES_COMPLETELY = false;
-  public static final boolean DEFAULT_OPTIMIZE_FOOTER_READ = false;
+  public static final boolean DEFAULT_OPTIMIZE_FOOTER_READ = true;
   public static final int DEFAULT_FOOTER_READ_BUFFER_SIZE = 512 * ONE_KB;
   public static final boolean DEFAULT_ALWAYS_READ_BUFFER_SIZE = false;
   public static final int DEFAULT_READ_AHEAD_BLOCK_SIZE = 4 * ONE_MB;
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsInputStream.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsInputStream.java
index 6d3bcaba95e70..dcdf8d7ea17d6 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsInputStream.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsInputStream.java
@@ -360,6 +360,7 @@ private int readFileCompletely(final byte[] b, final int off, final int len)
     return optimisedRead(b, off, len, 0, contentLength);
   }
 
+  // Performs the footer read of the file when footer read optimization is enabled.
   private int readLastBlock(final byte[] b, final int off, final int len)
       throws IOException {
     if (len == 0) {
@@ -815,7 +816,7 @@ public int getBufferSize() {
   }
 
   @VisibleForTesting
-  public int getFooterReadBufferSize() {
+  protected int getFooterReadBufferSize() {
     return footerReadSize;
   }
 
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsReadWriteAndSeek.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsReadWriteAndSeek.java
index beada775ae87b..bc420c6a1f8cd 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsReadWriteAndSeek.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsReadWriteAndSeek.java
@@ -84,6 +84,7 @@ private void testReadWriteAndSeek(int bufferSize) throws Exception {
     abfsConfiguration.setWriteBufferSize(bufferSize);
     abfsConfiguration.setReadBufferSize(bufferSize);
     abfsConfiguration.setReadAheadEnabled(readaheadEnabled);
+    abfsConfiguration.setOptimizeFooterRead(false);
 
     final byte[] b = new byte[2 * bufferSize];
     new Random().nextBytes(b);
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsInputStream.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsInputStream.java
index 2ac58fbcb1668..11b14162eb2f9 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsInputStream.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsInputStream.java
@@ -169,6 +169,8 @@ protected AzureBlobFileSystem getFileSystem(boolean readSmallFilesCompletely)
     final AzureBlobFileSystem fs =
        getFileSystem();
     getAbfsStore(fs).getAbfsConfiguration()
         .setReadSmallFilesCompletely(readSmallFilesCompletely);
+    getAbfsStore(fs).getAbfsConfiguration()
+        .setOptimizeFooterRead(false);
     return fs;
   }
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsInputStreamReadFooter.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsInputStreamReadFooter.java
index 3adf2cbaa5ad1..fbf469bbcf7d9 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsInputStreamReadFooter.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestAbfsInputStreamReadFooter.java
@@ -64,7 +64,7 @@ public void testMultipleServerCallsAreMadeWhenTheConfIsFalse()
   private void testNumBackendCalls(boolean optimizeFooterRead)
       throws Exception {
     for (int i = 0; i <= 5; i++) {
-      int fileSize = (int)Math.pow(2, i) * 256 * ONE_KB;
+      int fileSize = (int) Math.pow(2, i) * 256 * ONE_KB;
       final AzureBlobFileSystem fs = getFileSystem(optimizeFooterRead,
           fileSize);
       String fileName = methodName.getMethodName() + i;
@@ -157,7 +157,7 @@ private void testSeekAndReadWithConf(boolean optimizeFooterRead,
     // Files between footer read buffer and read buffer size
     // Files bigger than read buffer size
     for (int i = 0; i <= 5; i++) {
-      int fileSize = (int)Math.pow(2, i) * 256 * ONE_KB;
+      int fileSize = (int) Math.pow(2, i) * 256 * ONE_KB;
       final AzureBlobFileSystem fs = getFileSystem(optimizeFooterRead,
           fileSize);
       String fileName = methodName.getMethodName() + i;
@@ -254,7 +254,7 @@ private void seekReadAndTest(final FileSystem fs, final Path testFilePath,
   public void testPartialReadWithNoData() throws Exception {
     for (int i = 0; i <= 5; i++) {
-      int fileSize = (int)Math.pow(2, i) * 256 * ONE_KB;
+      int fileSize = (int) Math.pow(2, i) * 256 * ONE_KB;
       final AzureBlobFileSystem fs = getFileSystem(true, fileSize);
       String fileName = methodName.getMethodName() + i;
       byte[] fileContent = getRandomBytesArray(fileSize);
@@ -297,7 +297,7 @@ private void testPartialReadWithNoData(final FileSystem fs,
   public void testPartialReadWithSomeData() throws Exception {
     for (int i = 0; i <= 5; i++) {
-      int fileSize = (int)Math.pow(2, i) * 256 * ONE_KB;
+      int fileSize = (int) Math.pow(2, i) * 256 * ONE_KB;
       final AzureBlobFileSystem fs = getFileSystem(true, fileSize);
       String fileName = methodName.getMethodName() + i;
       byte[] fileContent = getRandomBytesArray(fileSize);
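
Note for reviewers: since this patch flips DEFAULT_OPTIMIZE_FOOTER_READ to true, clients that want the previous behaviour have to opt out through configuration, as the test changes above do via setOptimizeFooterRead(false). The sketch below is a minimal, non-authoritative example of doing the same from a client-side Hadoop Configuration; the property name fs.azure.read.optimizefooterread and the abfs URI are assumptions to verify against the target release, not part of this patch.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class DisableFooterReadSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumed configuration key for the footer read optimization toggle;
    // setting it to false restores the pre-patch default behaviour.
    conf.setBoolean("fs.azure.read.optimizefooterread", false);
    // Placeholder account/container URI, not taken from the patch.
    URI abfsUri = new URI("abfs://container@account.dfs.core.windows.net/");
    try (FileSystem fs = FileSystem.get(abfsUri, conf)) {
      // Streams opened from this FileSystem instance will not use the
      // footer read optimization.
      System.out.println("Opened " + fs.getUri()
          + " with footer read optimization disabled");
    }
  }
}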