HADOOP-14693. Test PR: Ran rewrite plugin on hadoop-hdfs module to upgrade to JUnit 5 #3304

Draft: wants to merge 1 commit into trunk
77 changes: 71 additions & 6 deletions hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -61,11 +61,27 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<artifactId>zookeeper</artifactId>
<type>test-jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
</exclusion>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.xerial.snappy</groupId>
@@ -154,12 +170,6 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
</exclusion>
</exclusions>
</dependency>

<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
@@ -169,11 +179,23 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.netty</groupId>
@@ -184,6 +206,16 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
</exclusion>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
@@ -209,11 +241,27 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<exclusions>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-test</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
</exclusion>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
@@ -447,6 +495,23 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
</filesets>
</configuration>
</plugin>
<plugin>
<groupId>org.openrewrite.maven</groupId>
<artifactId>rewrite-maven-plugin</artifactId>
<version>4.9.0</version>
<configuration>
<activeRecipes>
<recipe>org.openrewrite.java.testing.junit5.JUnit5BestPractices</recipe>
</activeRecipes>
</configuration>
<dependencies>
<dependency>
<groupId>org.openrewrite.recipe</groupId>
<artifactId>rewrite-testing-frameworks</artifactId>
<version>1.7.1</version>
</dependency>
</dependencies>
</plugin>
</plugins>
</build>
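Note (context, not part of this diff): the rewrite-maven-plugin configured above applies the recipe org.openrewrite.java.testing.junit5.JUnit5BestPractices, pulled in via the rewrite-testing-frameworks dependency. Such a configuration is typically executed once with mvn rewrite:run (or previewed with mvn rewrite:dryRun) to generate source rewrites like the JUnit 4 to JUnit 5 test changes shown below.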

@@ -18,10 +18,8 @@

package org.apache.hadoop;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -31,11 +29,7 @@

import org.apache.hadoop.ipc.RefreshRegistry;
import org.apache.hadoop.ipc.RefreshResponse;
import org.junit.Test;
import org.junit.Before;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.AfterClass;
import org.junit.jupiter.api.*;
import org.mockito.Mockito;

/**
@@ -51,7 +45,7 @@ public class TestGenericRefresh {
private static RefreshHandler firstHandler;
private static RefreshHandler secondHandler;

@BeforeClass
@BeforeAll
public static void setUpBeforeClass() throws Exception {
config = new Configuration();
config.set("hadoop.security.authorization", "true");
@@ -61,14 +55,14 @@ public static void setUpBeforeClass() throws Exception {
cluster.waitActive();
}

@AfterClass
@AfterAll
public static void tearDownBeforeClass() throws Exception {
if (cluster != null) {
cluster.shutdown();
}
}

@Before
@BeforeEach
public void setUp() throws Exception {
// Register Handlers, first one just sends an ok response
firstHandler = Mockito.mock(RefreshHandler.class);
@@ -85,7 +79,7 @@ public void setUp() throws Exception {
RefreshRegistry.defaultRegistry().register("secondHandler", secondHandler);
}

@After
@AfterEach
public void tearDown() throws Exception {
RefreshRegistry.defaultRegistry().unregisterAll("firstHandler");
RefreshRegistry.defaultRegistry().unregisterAll("secondHandler");
@@ -96,7 +90,7 @@ public void testInvalidCommand() throws Exception {
DFSAdmin admin = new DFSAdmin(config);
String [] args = new String[]{"-refresh", "nn"};
int exitCode = admin.run(args);
assertEquals("DFSAdmin should fail due to bad args", -1, exitCode);
assertEquals(-1, exitCode, "DFSAdmin should fail due to bad args");
}

@Test
@@ -105,7 +99,7 @@ public void testInvalidIdentifier() throws Exception {
String [] args = new String[]{"-refresh", "localhost:" +
cluster.getNameNodePort(), "unregisteredIdentity"};
int exitCode = admin.run(args);
assertEquals("DFSAdmin should fail due to no handler registered", -1, exitCode);
assertEquals(-1, exitCode, "DFSAdmin should fail due to no handler registered");
}

@Test
@@ -114,7 +108,7 @@ public void testValidIdentifier() throws Exception {
String[] args = new String[]{"-refresh",
"localhost:" + cluster.getNameNodePort(), "firstHandler"};
int exitCode = admin.run(args);
assertEquals("DFSAdmin should succeed", 0, exitCode);
assertEquals(0, exitCode, "DFSAdmin should succeed");

Mockito.verify(firstHandler).handleRefresh("firstHandler", new String[]{});
// Second handler was never called
@@ -128,11 +122,11 @@ public void testVariableArgs() throws Exception {
String[] args = new String[]{"-refresh", "localhost:" +
cluster.getNameNodePort(), "secondHandler", "one"};
int exitCode = admin.run(args);
assertEquals("DFSAdmin should return 2", 2, exitCode);
assertEquals(2, exitCode, "DFSAdmin should return 2");

exitCode = admin.run(new String[]{"-refresh", "localhost:" +
cluster.getNameNodePort(), "secondHandler", "one", "two"});
assertEquals("DFSAdmin should now return 3", 3, exitCode);
assertEquals(3, exitCode, "DFSAdmin should now return 3");

Mockito.verify(secondHandler).handleRefresh("secondHandler", new String[]{"one"});
Mockito.verify(secondHandler).handleRefresh("secondHandler", new String[]{"one", "two"});
@@ -147,7 +141,7 @@ public void testUnregistration() throws Exception {
String[] args = new String[]{"-refresh", "localhost:" +
cluster.getNameNodePort(), "firstHandler"};
int exitCode = admin.run(args);
assertEquals("DFSAdmin should return -1", -1, exitCode);
assertEquals(-1, exitCode, "DFSAdmin should return -1");
}

@Test
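For readers unfamiliar with the migration, the test changes above follow the usual JUnit 4 to JUnit 5 pattern that the JUnit5BestPractices recipe applies: lifecycle annotations are renamed and the assertion failure message moves from the first argument to the last. A minimal, self-contained sketch of the resulting JUnit 5 style (illustrative only, not part of this PR; the class and method names are hypothetical):

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

class JUnit5MigrationExampleTest {

  @BeforeAll   // JUnit 4: @BeforeClass
  static void setUpClass() throws Exception { }

  @AfterAll    // JUnit 4: @AfterClass
  static void tearDownClass() throws Exception { }

  @BeforeEach  // JUnit 4: @Before
  void setUp() throws Exception { }

  @AfterEach   // JUnit 4: @After
  void tearDown() throws Exception { }

  @Test
  void messageIsNowTheLastArgument() {
    int exitCode = 0;
    // JUnit 4: assertEquals("DFSAdmin should succeed", 0, exitCode);
    // JUnit 5: expected, actual, then message
    assertEquals(0, exitCode, "DFSAdmin should succeed");
  }
}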
@@ -18,10 +18,7 @@

package org.apache.hadoop;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.jupiter.api.Assertions.*;

import java.io.IOException;
import java.net.BindException;
@@ -40,8 +37,8 @@
import org.apache.hadoop.ipc.FairCallQueue;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.junit.After;
import org.junit.Test;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;

public class TestRefreshCallQueue {
private MiniDFSCluster cluster;
@@ -77,7 +74,7 @@ private void setUp(Class<?> queueClass) throws IOException {
}
}

@After
@AfterEach
public void tearDown() throws IOException {
if (cluster != null) {
cluster.shutdown();
@@ -115,23 +112,23 @@ public void testRefresh() throws Exception {
mockQueuePuts = 0;
setUp(MockCallQueue.class);

assertTrue("Mock queue should have been constructed",
mockQueueConstructions > 0);
assertTrue("Puts are routed through MockQueue", canPutInMockQueue());
assertTrue(
mockQueueConstructions > 0, "Mock queue should have been constructed");
assertTrue(canPutInMockQueue(), "Puts are routed through MockQueue");
int lastMockQueueConstructions = mockQueueConstructions;

// Replace queue with the queue specified in core-site.xml, which would be
// the LinkedBlockingQueue
DFSAdmin admin = new DFSAdmin(config);
String [] args = new String[]{"-refreshCallQueue"};
int exitCode = admin.run(args);
assertEquals("DFSAdmin should return 0", 0, exitCode);
assertEquals(0, exitCode, "DFSAdmin should return 0");

assertEquals("Mock queue should have no additional constructions",
lastMockQueueConstructions, mockQueueConstructions);
assertEquals(
lastMockQueueConstructions, mockQueueConstructions, "Mock queue should have no additional constructions");
try {
assertFalse("Puts are routed through LBQ instead of MockQueue",
canPutInMockQueue());
assertFalse(
canPutInMockQueue(), "Puts are routed through LBQ instead of MockQueue");
} catch (IOException ioe) {
fail("Could not put into queue at all");
}
@@ -22,9 +22,9 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

public class TestAclCLI extends CLITestHelperDFS {
private MiniDFSCluster cluster = null;
@@ -38,7 +38,7 @@ protected void initConf() {
DFSConfigKeys.DFS_NAMENODE_POSIX_ACL_INHERITANCE_ENABLED_KEY, false);
}

@Before
@BeforeEach
@Override
public void setUp() throws Exception {
super.setUp();
@@ -49,7 +49,7 @@ public void setUp() throws Exception {
username = System.getProperty("user.name");
}

@After
@AfterEach
@Override
public void tearDown() throws Exception {
super.tearDown();
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.cli;

import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_POSIX_ACL_INHERITANCE_ENABLED_KEY;
import org.junit.jupiter.api.Test;

import org.junit.Test;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_POSIX_ACL_INHERITANCE_ENABLED_KEY;

/**
* Test ACL CLI with POSIX ACL inheritance enabled.
@@ -18,7 +18,7 @@

package org.apache.hadoop.cli;

import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -37,9 +37,9 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.tools.CacheAdmin;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.xml.sax.SAXException;

public class TestCacheAdminCLI extends CLITestHelper {
@@ -51,7 +51,7 @@ public class TestCacheAdminCLI extends CLITestHelper {
protected FileSystem fs = null;
protected String namenode = null;

@Before
@BeforeEach
@Override
public void setUp() throws Exception {
super.setUp();
@@ -68,11 +68,11 @@ public void setUp() throws Exception {
username = System.getProperty("user.name");

fs = dfsCluster.getFileSystem();
assertTrue("Not a HDFS: "+fs.getUri(),
fs instanceof DistributedFileSystem);
assertTrue(
fs instanceof DistributedFileSystem, "Not a HDFS: " + fs.getUri());
}

@After
@AfterEach
@Override
public void tearDown() throws Exception {
if (fs != null) {