
Commit 5e10970

sumangala-patki authored and steveloughran committed
HADOOP-17765. ABFS: Use Unique File Paths in Tests. (#3153)
Contributed by Sumangala Patki

Change-Id: Ic8f34bf578069504f7a811a7729982b9c9f49729
1 parent 74f5f90 commit 5e10970

30 files changed, +349 -269 lines changed

hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java

Lines changed: 18 additions & 3 deletions
@@ -26,11 +26,11 @@
 import java.util.concurrent.Callable;
 
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
@@ -84,6 +84,7 @@ public abstract class AbstractAbfsIntegrationTest extends
   private AuthType authType;
   private boolean useConfiguredFileSystem = false;
   private boolean usingFilesystemForSASTests = false;
+  private static final int SHORTENED_GUID_LEN = 12;
 
   protected AbstractAbfsIntegrationTest() throws Exception {
     fileSystemName = TEST_CONTAINER_PREFIX + UUID.randomUUID().toString();
@@ -270,7 +271,8 @@ protected void createFilesystemForSASTests() throws Exception {
     // so first create temporary instance of the filesystem using SharedKey
     // then re-use the filesystem it creates with SAS auth instead of SharedKey.
     AzureBlobFileSystem tempFs = (AzureBlobFileSystem) FileSystem.newInstance(rawConfig);
-    Assert.assertTrue(tempFs.exists(new Path("/")));
+    ContractTestUtils.assertPathExists(tempFs, "This path should exist",
+        new Path("/"));
     abfsConfig.set(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME, AuthType.SAS.name());
     usingFilesystemForSASTests = true;
   }
@@ -440,7 +442,20 @@ public Path makeQualified(Path path) throws java.io.IOException {
    */
  protected Path path(String filepath) throws IOException {
    return getFileSystem().makeQualified(
-        new Path(getTestPath(), filepath));
+        new Path(getTestPath(), getUniquePath(filepath)));
+  }
+
+  /**
+   * Generate a unique path using the given filepath.
+   * @param filepath path string
+   * @return unique path created from filepath and a GUID
+   */
+  protected Path getUniquePath(String filepath) {
+    if (filepath.equals("/")) {
+      return new Path(filepath);
+    }
+    return new Path(filepath + StringUtils
+        .right(UUID.randomUUID().toString(), SHORTENED_GUID_LEN));
  }
 
  /**
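
The helper above is the core of the patch: every test path is routed through path(), which now appends the last twelve characters of a random UUID so that repeated or concurrent test runs do not collide on the same store paths. A minimal standalone sketch of the same idea follows; the UniquePathDemo class and the sample name "testfile" are illustrative only, not part of the patch.

import java.util.UUID;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.Path;

/** Minimal sketch of the GUID-suffixing idea behind getUniquePath(). */
public class UniquePathDemo {
  private static final int SHORTENED_GUID_LEN = 12;

  static Path uniquePath(String filepath) {
    // The root path is left untouched, mirroring the helper in the patch.
    if (filepath.equals("/")) {
      return new Path(filepath);
    }
    // Append the rightmost 12 characters of a random UUID so two tests
    // (or two runs of the same test) never reuse the exact same path name.
    return new Path(filepath
        + StringUtils.right(UUID.randomUUID().toString(), SHORTENED_GUID_LEN));
  }

  public static void main(String[] args) {
    // Prints "testfile" followed by a 12-character suffix that differs per call.
    System.out.println(uniquePath("testfile"));
  }
}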

hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java

Lines changed: 6 additions & 5 deletions
@@ -93,7 +93,7 @@ public void testUnknownHost() throws Exception {
   public void testListPathWithValidListMaxResultsValues()
       throws IOException, ExecutionException, InterruptedException {
     final int fileCount = 10;
-    final String directory = "testWithValidListMaxResultsValues";
+    final Path directory = getUniquePath("testWithValidListMaxResultsValues");
     createDirectoryWithNFiles(directory, fileCount);
     final int[] testData = {fileCount + 100, fileCount + 1, fileCount,
         fileCount - 1, 1};
@@ -102,7 +102,7 @@ public void testListPathWithValidListMaxResultsValues()
       setListMaxResults(listMaxResults);
       int expectedListResultsSize =
           listMaxResults > fileCount ? fileCount : listMaxResults;
-      Assertions.assertThat(listPath(directory)).describedAs(
+      Assertions.assertThat(listPath(directory.toString())).describedAs(
           "AbfsClient.listPath result should contain %d items when "
              + "listMaxResults is %d and directory contains %d items",
          expectedListResultsSize, listMaxResults, fileCount)
@@ -114,9 +114,10 @@ public void testListPathWithValidListMaxResultsValues()
   public void testListPathWithValueGreaterThanServerMaximum()
       throws IOException, ExecutionException, InterruptedException {
     setListMaxResults(LIST_MAX_RESULTS_SERVER + 100);
-    final String directory = "testWithValueGreaterThanServerMaximum";
+    final Path directory = getUniquePath(
+        "testWithValueGreaterThanServerMaximum");
     createDirectoryWithNFiles(directory, LIST_MAX_RESULTS_SERVER + 200);
-    Assertions.assertThat(listPath(directory)).describedAs(
+    Assertions.assertThat(listPath(directory.toString())).describedAs(
         "AbfsClient.listPath result will contain a maximum of %d items "
            + "even if listMaxResults >= %d or directory "
            + "contains more than %d items", LIST_MAX_RESULTS_SERVER,
@@ -152,7 +153,7 @@ private void setListMaxResults(int listMaxResults) throws IOException {
         .setListMaxResults(listMaxResults);
   }
 
-  private void createDirectoryWithNFiles(String directory, int n)
+  private void createDirectoryWithNFiles(Path directory, int n)
       throws ExecutionException, InterruptedException {
     final List<Future<Void>> tasks = new ArrayList<>();
     ExecutorService es = Executors.newFixedThreadPool(10);

hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsListStatusRemoteIterator.java

Lines changed: 3 additions & 3 deletions
@@ -237,8 +237,8 @@ public void testHasNextForEmptyDir() throws Exception {
   @Test
   public void testHasNextForFile() throws Exception {
     final AzureBlobFileSystem fs = getFileSystem();
-    String testFileName = "testFile";
-    Path testFile = new Path(testFileName);
+    Path testFile = path("testFile");
+    String testFileName = testFile.toString();
     getFileSystem().create(testFile);
     setPageSize(10);
     RemoteIterator<FileStatus> fsItr = fs.listStatusIterator(testFile);
@@ -304,7 +304,7 @@ public String listStatus(Path path, String startFrom,
 
   private Path createTestDirectory() throws IOException {
     String testDirectoryName = "testDirectory" + System.currentTimeMillis();
-    Path testDirectory = new Path(testDirectoryName);
+    Path testDirectory = path(testDirectoryName);
     getFileSystem().mkdirs(testDirectory);
     return testDirectory;
   }

hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsReadWriteAndSeek.java

Lines changed: 7 additions & 5 deletions
@@ -45,7 +45,7 @@
  */
 @RunWith(Parameterized.class)
 public class ITestAbfsReadWriteAndSeek extends AbstractAbfsScaleTest {
-  private static final Path TEST_PATH = new Path("/testfile");
+  private static final String TEST_PATH = "/testfile";
 
   @Parameterized.Parameters(name = "Size={0}")
   public static Iterable<Object[]> sizes() {
@@ -75,13 +75,14 @@ private void testReadWriteAndSeek(int bufferSize) throws Exception {
     final byte[] b = new byte[2 * bufferSize];
     new Random().nextBytes(b);
 
-    try (FSDataOutputStream stream = fs.create(TEST_PATH)) {
+    Path testPath = path(TEST_PATH);
+    try (FSDataOutputStream stream = fs.create(testPath)) {
       stream.write(b);
     }
 
     final byte[] readBuffer = new byte[2 * bufferSize];
     int result;
-    try (FSDataInputStream inputStream = fs.open(TEST_PATH)) {
+    try (FSDataInputStream inputStream = fs.open(testPath)) {
       ((AbfsInputStream) inputStream.getWrappedStream()).registerListener(
           new TracingHeaderValidator(abfsConfiguration.getClientCorrelationId(),
               fs.getFileSystemId(), FSOperationType.READ, true, 0,
@@ -112,7 +113,8 @@ public void testReadAheadRequestID() throws java.io.IOException {
 
     final byte[] b = new byte[bufferSize * 10];
     new Random().nextBytes(b);
-    try (FSDataOutputStream stream = fs.create(TEST_PATH)) {
+    Path testPath = path(TEST_PATH);
+    try (FSDataOutputStream stream = fs.create(testPath)) {
       ((AbfsOutputStream) stream.getWrappedStream()).registerListener(
           new TracingHeaderValidator(abfsConfiguration.getClientCorrelationId(),
               fs.getFileSystemId(), FSOperationType.WRITE, false, 0,
@@ -126,7 +128,7 @@ public void testReadAheadRequestID() throws java.io.IOException {
     fs.registerListener(
         new TracingHeaderValidator(abfsConfiguration.getClientCorrelationId(),
             fs.getFileSystemId(), FSOperationType.OPEN, false, 0));
-    try (FSDataInputStream inputStream = fs.open(TEST_PATH)) {
+    try (FSDataInputStream inputStream = fs.open(testPath)) {
       ((AbfsInputStream) inputStream.getWrappedStream()).registerListener(
           new TracingHeaderValidator(abfsConfiguration.getClientCorrelationId(),
               fs.getFileSystemId(), FSOperationType.READ, false, 0,

hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsStatistics.java

Lines changed: 9 additions & 9 deletions
@@ -91,7 +91,7 @@ public void testCreateStatistics() throws IOException {
 
     fs.mkdirs(createDirectoryPath);
     fs.createNonRecursive(createFilePath, FsPermission
-        .getDefault(), false, 1024, (short) 1, 1024, null);
+        .getDefault(), false, 1024, (short) 1, 1024, null).close();
 
     Map<String, Long> metricMap = fs.getInstrumentationMap();
     /*
@@ -117,7 +117,7 @@ public void testCreateStatistics() throws IOException {
       fs.mkdirs(path(getMethodName() + "Dir" + i));
       fs.createNonRecursive(path(getMethodName() + i),
           FsPermission.getDefault(), false, 1024, (short) 1,
-          1024, null);
+          1024, null).close();
     }
 
     metricMap = fs.getInstrumentationMap();
@@ -160,7 +160,7 @@ public void testDeleteStatistics() throws IOException {
     files_deleted counters.
      */
     fs.mkdirs(createDirectoryPath);
-    fs.create(path(createDirectoryPath + getMethodName()));
+    fs.create(path(createDirectoryPath + getMethodName())).close();
     fs.delete(createDirectoryPath, true);
 
     Map<String, Long> metricMap = fs.getInstrumentationMap();
@@ -179,7 +179,7 @@ public void testDeleteStatistics() throws IOException {
     directories_deleted is called or not.
      */
     fs.mkdirs(createDirectoryPath);
-    fs.create(createFilePath);
+    fs.create(createFilePath).close();
     fs.delete(createDirectoryPath, true);
     metricMap = fs.getInstrumentationMap();
 
@@ -199,9 +199,9 @@ public void testOpenAppendRenameExists() throws IOException {
     Path createFilePath = path(getMethodName());
     Path destCreateFilePath = path(getMethodName() + "New");
 
-    fs.create(createFilePath);
-    fs.open(createFilePath);
-    fs.append(createFilePath);
+    fs.create(createFilePath).close();
+    fs.open(createFilePath).close();
+    fs.append(createFilePath).close();
     assertTrue(fs.rename(createFilePath, destCreateFilePath));
 
     Map<String, Long> metricMap = fs.getInstrumentationMap();
@@ -225,11 +225,11 @@ public void testOpenAppendRenameExists() throws IOException {
     //re-initialising Abfs to reset statistic values.
     fs.initialize(fs.getUri(), fs.getConf());
 
-    fs.create(destCreateFilePath);
+    fs.create(destCreateFilePath).close();
 
     for (int i = 0; i < NUMBER_OF_OPS; i++) {
       fs.open(destCreateFilePath);
-      fs.append(destCreateFilePath);
+      fs.append(destCreateFilePath).close();
     }
 
     metricMap = fs.getInstrumentationMap();
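
Alongside the unique-path change, this test (and the authorization test further down) now closes each stream returned by create(), open() and append() instead of leaving it dangling. A hedged sketch of the same pattern using try-with-resources, which behaves like the explicit .close() calls; the local file system and the /tmp path are assumptions for illustration, not taken from the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CloseStreamsDemo {
  public static void main(String[] args) throws Exception {
    // Assumption: a local file system stands in for the ABFS instance the
    // tests obtain via getFileSystem().
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Path file = new Path("/tmp/closeStreamsDemo");

    // Equivalent to fs.create(file).close(): the stream is closed even if a
    // write or an assertion in between fails.
    try (FSDataOutputStream out = fs.create(file)) {
      out.writeBytes("demo");
    }

    fs.delete(file, false);
  }
}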

hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsStreamStatistics.java

Lines changed: 2 additions & 2 deletions
@@ -52,8 +52,8 @@ public void testAbfsStreamOps() throws Exception {
         + "Abfs");
 
     final AzureBlobFileSystem fs = getFileSystem();
-    Path smallOperationsFile = new Path("testOneReadWriteOps");
-    Path largeOperationsFile = new Path("testLargeReadWriteOps");
+    Path smallOperationsFile = path("testOneReadWriteOps");
+    Path largeOperationsFile = path("testLargeReadWriteOps");
     FileSystem.Statistics statistics = fs.getFsStatistics();
     String testReadWriteOps = "test this";
     statistics.reset();

hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemAppend.java

Lines changed: 12 additions & 11 deletions
@@ -35,8 +35,8 @@
  */
 public class ITestAzureBlobFileSystemAppend extends
     AbstractAbfsIntegrationTest {
-  private static final Path TEST_FILE_PATH = new Path("testfile");
-  private static final Path TEST_FOLDER_PATH = new Path("testFolder");
+  private static final String TEST_FILE_PATH = "testfile";
+  private static final String TEST_FOLDER_PATH = "testFolder";
 
   public ITestAzureBlobFileSystemAppend() throws Exception {
     super();
@@ -45,15 +45,15 @@ public ITestAzureBlobFileSystemAppend() throws Exception {
   @Test(expected = FileNotFoundException.class)
   public void testAppendDirShouldFail() throws Exception {
     final AzureBlobFileSystem fs = getFileSystem();
-    final Path filePath = TEST_FILE_PATH;
+    final Path filePath = path(TEST_FILE_PATH);
     fs.mkdirs(filePath);
-    fs.append(filePath, 0);
+    fs.append(filePath, 0).close();
   }
 
   @Test
   public void testAppendWithLength0() throws Exception {
     final AzureBlobFileSystem fs = getFileSystem();
-    try(FSDataOutputStream stream = fs.create(TEST_FILE_PATH)) {
+    try(FSDataOutputStream stream = fs.create(path(TEST_FILE_PATH))) {
       final byte[] b = new byte[1024];
       new Random().nextBytes(b);
       stream.write(b, 1000, 0);
@@ -65,28 +65,29 @@ public void testAppendWithLength0() throws Exception {
   @Test(expected = FileNotFoundException.class)
   public void testAppendFileAfterDelete() throws Exception {
     final AzureBlobFileSystem fs = getFileSystem();
-    final Path filePath = TEST_FILE_PATH;
+    final Path filePath = path(TEST_FILE_PATH);
     ContractTestUtils.touch(fs, filePath);
     fs.delete(filePath, false);
 
-    fs.append(filePath);
+    fs.append(filePath).close();
   }
 
   @Test(expected = FileNotFoundException.class)
   public void testAppendDirectory() throws Exception {
     final AzureBlobFileSystem fs = getFileSystem();
-    final Path folderPath = TEST_FOLDER_PATH;
+    final Path folderPath = path(TEST_FOLDER_PATH);
     fs.mkdirs(folderPath);
-    fs.append(folderPath);
+    fs.append(folderPath).close();
   }
 
   @Test
   public void testTracingForAppend() throws IOException {
     AzureBlobFileSystem fs = getFileSystem();
-    fs.create(TEST_FILE_PATH);
+    Path testPath = path(TEST_FILE_PATH);
+    fs.create(testPath).close();
     fs.registerListener(new TracingHeaderValidator(
         fs.getAbfsStore().getAbfsConfiguration().getClientCorrelationId(),
         fs.getFileSystemId(), FSOperationType.APPEND, false, 0));
-    fs.append(TEST_FILE_PATH, 10);
+    fs.append(testPath, 10);
   }
 }

hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemAuthorization.java

Lines changed: 3 additions & 3 deletions
@@ -99,7 +99,7 @@ public void testSASTokenProviderEmptySASToken() throws Exception {
         this.getConfiguration().getRawConfiguration());
     intercept(SASTokenProviderException.class,
         () -> {
-          testFs.create(new org.apache.hadoop.fs.Path("/testFile"));
+          testFs.create(new org.apache.hadoop.fs.Path("/testFile")).close();
         });
   }
 
@@ -114,7 +114,7 @@ public void testSASTokenProviderNullSASToken() throws Exception {
     testFs.initialize(fs.getUri(), this.getConfiguration().getRawConfiguration());
     intercept(SASTokenProviderException.class,
         ()-> {
-          testFs.create(new org.apache.hadoop.fs.Path("/testFile"));
+          testFs.create(new org.apache.hadoop.fs.Path("/testFile")).close();
         });
   }
 
@@ -297,7 +297,7 @@ private void executeOp(Path reqPath, AzureBlobFileSystem fs,
       fs.listStatus(reqPath);
       break;
     case CreatePath:
-      fs.create(reqPath);
+      fs.create(reqPath).close();
       break;
     case RenamePath:
       fs.rename(reqPath,

hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemBackCompat.java

Lines changed: 6 additions & 3 deletions
@@ -50,13 +50,16 @@ public void testBlobBackCompat() throws Exception {
     CloudBlobContainer container = blobClient.getContainerReference(this.getFileSystemName());
     container.createIfNotExists();
 
-    CloudBlockBlob blockBlob = container.getBlockBlobReference("test/10/10/10");
+    Path testPath = getUniquePath("test");
+    CloudBlockBlob blockBlob = container
+        .getBlockBlobReference(testPath + "/10/10/10");
     blockBlob.uploadText("");
 
-    blockBlob = container.getBlockBlobReference("test/10/123/3/2/1/3");
+    blockBlob = container.getBlockBlobReference(testPath + "/10/123/3/2/1/3");
     blockBlob.uploadText("");
 
-    FileStatus[] fileStatuses = fs.listStatus(new Path("/test/10/"));
+    FileStatus[] fileStatuses = fs
+        .listStatus(new Path(String.format("/%s/10/", testPath)));
     assertEquals(2, fileStatuses.length);
     assertEquals("10", fileStatuses[0].getPath().getName());
     assertTrue(fileStatuses[0].isDirectory());

hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCheckAccess.java

Lines changed: 2 additions & 1 deletion
@@ -352,7 +352,8 @@ private void modifyAcl(Path file, String uid, FsAction fsAction)
 
   private Path setupTestDirectoryAndUserAccess(String testFileName,
       FsAction fsAction) throws Exception {
-    Path file = new Path(TEST_FOLDER_PATH + testFileName);
+    Path testPath = path(TEST_FOLDER_PATH);
+    Path file = new Path(testPath + testFileName);
     file = this.superUserFs.makeQualified(file);
     this.superUserFs.delete(file, true);
     this.superUserFs.create(file);

hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCopy.java

Lines changed: 1 addition & 1 deletion
@@ -53,7 +53,7 @@ public void testCopyFromLocalFileSystem() throws Exception {
     localFs.delete(localFilePath, true);
     try {
       writeString(localFs, localFilePath, "Testing");
-      Path dstPath = new Path("copiedFromLocal");
+      Path dstPath = path("copiedFromLocal");
      assertTrue(FileUtil.copy(localFs, localFilePath, fs, dstPath, false,
          fs.getConf()));
      assertIsFile(fs, dstPath);
