Skip to content

HADOOP-19424. [S3A] Upgrade JUnit from 4 to 5 in hadoop-aws. #7752

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 3 commits into
base: trunk
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 0 additions & 10 deletions hadoop-tools/hadoop-aws/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -499,11 +499,6 @@
<artifactId>wildfly-openssl</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-inline</artifactId>
Expand Down Expand Up @@ -618,10 +613,5 @@
<artifactId>junit-platform-launcher</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,8 @@

import org.apache.hadoop.conf.Configuration;

import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.ExpectedException;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;


/**
Expand All @@ -49,14 +47,11 @@ public abstract class AbstractS3AMockTest {
.build())
.build();

@Rule
public ExpectedException exception = ExpectedException.none();

protected S3AFileSystem fs;
protected S3Client s3;
protected Configuration conf;

@Before
@BeforeEach
public void setup() throws Exception {
conf = createConfiguration();
fs = new S3AFileSystem();
Expand Down Expand Up @@ -97,7 +92,7 @@ public S3Client getS3Client() {
return s3;
}

@After
@AfterEach
public void teardown() throws Exception {
if (fs != null) {
fs.close();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,9 @@
import org.apache.hadoop.util.SemaphoredDelegatingExecutor;
import org.apache.hadoop.util.StopWatch;

import org.junit.AfterClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -35,11 +34,12 @@
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import static org.junit.Assert.assertEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;

/**
* Basic test for S3A's blocking executor service.
*/
@Timeout(60)
public class ITestBlockingThreadPoolExecutorService {

private static final Logger LOG = LoggerFactory.getLogger(
Expand All @@ -56,10 +56,7 @@ public class ITestBlockingThreadPoolExecutorService {

private static BlockingThreadPoolExecutorService tpe;

@Rule
public Timeout testTimeout = new Timeout(60, TimeUnit.SECONDS);

@AfterClass
@AfterAll
public static void afterClass() throws Exception {
// Runs once after every test in the class (JUnit 5 replacement for
// JUnit 4's @AfterClass). Presumably shuts down the shared static
// BlockingThreadPoolExecutorService `tpe` so threads don't leak
// between test classes — TODO confirm against ensureDestroyed().
ensureDestroyed();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
package org.apache.hadoop.fs.s3a;

import org.assertj.core.api.Assertions;
import org.junit.Test;
import org.junit.jupiter.api.Test;

import org.apache.hadoop.fs.Path;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@

import java.io.IOException;
import java.nio.file.AccessDeniedException;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
Expand All @@ -29,10 +28,9 @@
import org.apache.hadoop.fs.s3a.impl.InstantiationIOException;

import org.assertj.core.api.Assertions;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
import org.junit.jupiter.api.Test;

import org.junit.jupiter.api.Timeout;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.AwsCredentials;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
Expand All @@ -46,19 +44,18 @@
import static org.apache.hadoop.fs.s3a.test.PublicDatasetTestUtils.getExternalData;
import static org.apache.hadoop.fs.s3a.test.PublicDatasetTestUtils.isUsingDefaultExternalDataFile;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
import static org.junit.Assert.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

/**
* Integration tests for {@link Constants#AWS_CREDENTIALS_PROVIDER} logic
* through the S3A Filesystem instantiation process.
*/
@Timeout(60)
public class ITestS3AAWSCredentialsProvider {
private static final Logger LOG =
LoggerFactory.getLogger(ITestS3AAWSCredentialsProvider.class);

@Rule
public Timeout testTimeout = new Timeout(60_1000, TimeUnit.MILLISECONDS);

/**
* Expecting a wrapped ClassNotFoundException.
*/
Expand Down Expand Up @@ -219,9 +216,8 @@ public void testAnonymousProvider() throws Exception {
.describedAs("Filesystem")
.isNotNull();
FileStatus stat = fs.getFileStatus(testFile);
assertEquals(
"The qualified path returned by getFileStatus should be same as the original file",
testFile, stat.getPath());
assertEquals(testFile, stat.getPath(),
"The qualified path returned by getFileStatus should be same as the original file");
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@
import java.io.IOException;
import java.io.InputStream;

import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.assertj.core.api.Assertions;

import org.apache.hadoop.conf.Configuration;
Expand Down Expand Up @@ -67,7 +67,7 @@ public class ITestS3AAnalyticsAcceleratorStreamReading extends AbstractS3ATestBa

private Path externalTestFile;

@Before
@BeforeEach
public void setUp() throws Exception {
super.setup();
skipIfClientSideEncryption();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import java.io.IOException;

import org.assertj.core.api.Assertions;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import software.amazon.awssdk.services.s3.model.ChecksumAlgorithm;
import software.amazon.awssdk.services.s3.model.ChecksumMode;
import software.amazon.awssdk.services.s3.model.HeadObjectRequest;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@

import java.io.IOException;

import org.junit.Ignore;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import org.apache.commons.lang3.StringUtils;
Expand Down Expand Up @@ -96,13 +96,13 @@ protected void assertEncrypted(Path path) throws IOException {
}

@Override
@Ignore
@Disabled
@Test
public void testEncryptionSettingPropagation() throws Throwable {
// Intentionally empty: overrides the superclass test with a no-op and
// marks it @Disabled (JUnit 5 equivalent of JUnit 4's @Ignore).
// NOTE(review): the reason the base test is skipped for this encryption
// variant is not visible here — confirm and document it in the annotation,
// e.g. @Disabled("not applicable for <mode>").
}

@Override
@Ignore
@Disabled
@Test
public void testEncryption() throws Throwable {
// Intentionally empty: disables the inherited testEncryption case for this
// subclass (@Disabled is the Jupiter replacement for JUnit 4's @Ignore).
// NOTE(review): consider supplying a reason string to @Disabled so the
// skip shows up with an explanation in test reports.
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,9 @@
package org.apache.hadoop.fs.s3a;

import org.apache.hadoop.conf.Configuration;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -31,12 +31,12 @@
* Test the test utils. Why an integration test? it's needed to
* verify property pushdown.
*/
public class ITestS3ATestUtils extends Assert {
public class ITestS3ATestUtils extends Assertions {
private static final Logger LOG =
LoggerFactory.getLogger(ITestS3ATestUtils.class);
public static final String KEY = "undefined.property";

@Before
@BeforeEach
public void clear() {
// Reset the probe system property (KEY = "undefined.property") before each
// test so state set by a previous test or the surrounding JVM cannot leak
// into property-pushdown assertions. @BeforeEach is the JUnit 5 replacement
// for JUnit 4's @Before.
System.clearProperty(KEY);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
import org.apache.hadoop.fs.s3a.impl.PutObjectOptions;
import org.apache.hadoop.fs.store.audit.AuditSpan;

import org.junit.Assert;
import org.junit.jupiter.api.Assertions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -45,6 +45,7 @@
import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
import static org.apache.hadoop.fs.s3a.commit.CommitConstants.MAGIC_PATH_PREFIX;
import static org.junit.jupiter.api.Assertions.assertFalse;

/**
* Utilities for S3A multipart upload tests.
Expand Down Expand Up @@ -80,8 +81,7 @@ static void cleanupParts(S3AFileSystem fs, Set <IdKey> keySet) {
anyFailure = true;
}
}
Assert.assertFalse("Failure aborting multipart upload(s), see log.",
anyFailure);
assertFalse(anyFailure, "Failure aborting multipart upload(s), see log.");
}

public static IdKey createPartUpload(S3AFileSystem fs, String key, int len,
Expand Down Expand Up @@ -116,7 +116,7 @@ public static void assertNoUploadsAt(S3AFileSystem fs, Path path) throws
RemoteIterator<MultipartUpload> uploads = fs.listUploads(key);
while (uploads.hasNext()) {
MultipartUpload upload = uploads.next();
Assert.fail("Found unexpected upload " + upload.key() + " " +
Assertions.fail("Found unexpected upload " + upload.key() + " " +
truncatedUploadId(upload.uploadId()));
}
}
Expand Down
Loading