Commit dfb9fdc

HBASE-27443 Use java11 in the general check of our jenkins job (#4845)
Signed-off-by: Guanghao Zhang <zghao@apache.org>
(cherry picked from commit 5c6ff7d)
1 parent 78ddd3d commit dfb9fdc

11 files changed (+59, -79 lines)


dev-support/Jenkinsfile

Lines changed: 3 additions & 1 deletion

@@ -202,7 +202,9 @@ pipeline {
         environment {
           BASEDIR = "${env.WORKSPACE}/component"
           TESTS = "${env.SHALLOW_CHECKS}"
-          SET_JAVA_HOME = '/usr/lib/jvm/java-8'
+          SET_JAVA_HOME = "/usr/lib/jvm/java-11"
+          // Activates hadoop 3.0 profile in maven runs.
+          HADOOP_PROFILE = '3.0'
           OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
           OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
           ASF_NIGHTLIES_GENERAL_CHECK_BASE="${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}"

dev-support/Jenkinsfile_GitHub

Lines changed: 1 addition & 1 deletion

@@ -78,7 +78,7 @@ pipeline {
         environment {
           // customized per parallel stage
           PLUGINS = "${GENERAL_CHECK_PLUGINS}"
-          SET_JAVA_HOME = '/usr/lib/jvm/java-8'
+          SET_JAVA_HOME = "/usr/lib/jvm/java-11"
           WORKDIR_REL = "${WORKDIR_REL_GENERAL_CHECK}"
           // identical for all parallel stages
           WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
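
Both Jenkinsfiles now point SET_JAVA_HOME for the general check at a java-11 install. As an aside, a hypothetical Java snippet (not part of this commit; the class name and threshold are illustrative) of the kind of sanity check a build step could run to confirm the JDK it actually picked up:

    // Hypothetical sanity check, not HBase code: verify the JVM running the
    // general check matches the SET_JAVA_HOME expectation of JDK 11+.
    public final class JvmCheck {
      public static void main(String[] args) {
        int feature = Runtime.version().feature(); // e.g. 11, 17
        System.out.println("JAVA_HOME=" + System.getenv("JAVA_HOME"));
        if (feature < 11) {
          throw new IllegalStateException("General check expects JDK 11+, found " + feature);
        }
      }
    }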

hbase-build-configuration/pom.xml

Lines changed: 21 additions & 32 deletions

@@ -69,49 +69,19 @@
       <activation>
         <activeByDefault>false</activeByDefault>
       </activation>
-      <properties>
-        <!-- https://errorprone.info/docs/installation Maven section has details -->
-        <!-- required when compiling with JDK 8 -->
-        <javac.version>9+181-r4173-1</javac.version>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>com.google.errorprone</groupId>
-          <artifactId>error_prone_core</artifactId>
-          <version>${error-prone.version}</version>
-          <scope>provided</scope>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.code.findbugs</groupId>
-              <artifactId>jsr305</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>com.google.errorprone</groupId>
-          <artifactId>javac</artifactId>
-          <version>${javac.version}</version>
-          <scope>provided</scope>
-        </dependency>
-      </dependencies>
       <build>
         <plugins>
           <!-- Turn on error-prone -->
           <plugin>
             <groupId>org.apache.maven.plugins</groupId>
             <artifactId>maven-compiler-plugin</artifactId>
             <configuration>
-              <source>${compileSource}</source>
-              <target>${compileSource}</target>
-              <!-- required when compiling with JDK 8 -->
-              <fork>true</fork>
+              <release>${releaseTarget}</release>
               <showWarnings>true</showWarnings>
               <compilerArgs>
                 <arg>-XDcompilePolicy=simple</arg>
                 <!-- All -Xep need to be on single line see: https://github.com/google/error-prone/pull/1115 -->
-                <arg>-Xplugin:ErrorProne -XepDisableWarningsInGeneratedCode -Xep:FallThrough:OFF -Xep:MutablePublicArray:OFF -Xep:ClassNewInstance:ERROR -Xep:MissingDefault:ERROR</arg>
-                <!-- Required when compiling with JDK 8 -->
-                <arg>-J-Xbootclasspath/p:${settings.localRepository}/com/google/errorprone/javac/${javac.version}/javac-${javac.version}.jar</arg>
+                <arg>-Xplugin:ErrorProne -XepDisableWarningsInGeneratedCode -XepExcludedPaths:.*/target/.* -Xep:FallThrough:OFF -Xep:MutablePublicArray:OFF -Xep:ClassNewInstance:ERROR -Xep:MissingDefault:ERROR -Xep:BanJNDI:WARN</arg>
               </compilerArgs>
               <annotationProcessorPaths>
                 <path>
@@ -122,6 +92,25 @@
               </annotationProcessorPaths>
             </configuration>
           </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-enforcer-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>jdk11-required</id>
+                <goals>
+                  <goal>enforce</goal>
+                </goals>
+                <configuration>
+                  <rules>
+                    <requireJavaVersion>
+                      <version>[11,)</version>
+                    </requireJavaVersion>
+                  </rules>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
         </plugins>
       </build>
     </profile>
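
With the JDK 8 bootclasspath workaround gone, error-prone runs as an ordinary javac plugin, and MissingDefault stays promoted to ERROR in the -Xep list above. A hypothetical snippet, not from the HBase codebase, of the switch shape that check insists on:

    // Hypothetical example only: with -Xep:MissingDefault:ERROR, a switch over a
    // non-enum value must carry a default statement group or compilation fails.
    class MissingDefaultSketch {
      static String describe(int code) {
        switch (code) {
          case 0:
            return "ok";
          case 1:
            return "retry";
          default: // required by MissingDefault; also a natural place to handle surprises
            return "unknown(" + code + ")";
        }
      }
    }

The new maven-enforcer-plugin execution complements this at the build level: with requireJavaVersion set to [11,), the build fails fast when run on a JDK older than 11.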

hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java

Lines changed: 1 addition & 1 deletion

@@ -201,4 +201,4 @@ public static BloomFilterWriter createDeleteBloomAtWrite(Configuration conf,
     writer.addInlineBlockWriter(bloomWriter);
     return bloomWriter;
   }
-};
+}

hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java

Lines changed: 5 additions & 9 deletions

@@ -36,21 +36,17 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
+
 /**
  * Base class for command lines that start up various HBase daemons.
  */
 @InterfaceAudience.Private
 public abstract class ServerCommandLine extends Configured implements Tool {
   private static final Logger LOG = LoggerFactory.getLogger(ServerCommandLine.class);
-  @SuppressWarnings("serial")
-  private static final Set<String> DEFAULT_SKIP_WORDS = new HashSet<String>() {
-    {
-      add("secret");
-      add("passwd");
-      add("password");
-      add("credential");
-    }
-  };
+
+  private static final Set<String> DEFAULT_SKIP_WORDS =
+    ImmutableSet.of("secret", "passwd", "password", "credential");

   /**
    * Implementing subclasses should return a usage string to print out.
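
The removed double-brace initializer defined an anonymous HashSet subclass (the reason for the old @SuppressWarnings("serial")), whereas ImmutableSet.of from the shaded Guava builds the set in a single immutable expression. A rough side-by-side sketch, illustrative rather than HBase code:

    import java.util.HashSet;
    import java.util.Set;

    import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;

    class SkipWordsSketch {
      // Old style: anonymous HashSet subclass plus an instance initializer block;
      // it is mutable and drags along an extra generated class.
      static final Set<String> OLD_STYLE = new HashSet<String>() {
        {
          add("secret");
          add("passwd");
        }
      };

      // New style: one expression, immutable, no extra subclass.
      static final Set<String> NEW_STYLE = ImmutableSet.of("secret", "passwd");
    }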

hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java

Lines changed: 5 additions & 11 deletions

@@ -24,7 +24,7 @@
 import static org.junit.Assert.fail;

 import java.io.IOException;
-import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CallQueueTooBigException;
@@ -266,16 +266,10 @@ public void testCacheClearingOnCallQueueTooBig() throws Exception {
   }

   public static List<Throwable> metaCachePreservingExceptions() {
-    return new ArrayList<Throwable>() {
-      {
-        add(new RegionOpeningException(" "));
-        add(new RegionTooBusyException("Some old message"));
-        add(new RpcThrottlingException(" "));
-        add(new MultiActionResultTooLarge(" "));
-        add(new RetryImmediatelyException(" "));
-        add(new CallQueueTooBigException());
-      }
-    };
+    return Arrays.asList(new RegionOpeningException(" "),
+      new RegionTooBusyException("Some old message"), new RpcThrottlingException(" "),
+      new MultiActionResultTooLarge(" "), new RetryImmediatelyException(" "),
+      new CallQueueTooBigException());
   }

   public static class RegionServerWithFakeRpcServices extends HRegionServer {
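
Arrays.asList returns a fixed-size list backed by the argument array, which is enough here because callers only iterate metaCachePreservingExceptions(). A small illustrative sketch, not HBase code, of what that view allows and rejects:

    import java.util.Arrays;
    import java.util.List;

    class FixedSizeListSketch {
      public static void main(String[] args) {
        List<String> xs = Arrays.asList("a", "b");
        xs.set(0, "A");      // fine: writes through to the backing array
        try {
          xs.add("c");       // fixed-size: structural changes are rejected
        } catch (UnsupportedOperationException expected) {
          System.out.println("add() not supported on Arrays.asList views");
        }
      }
    }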

hbase-server/src/test/java/org/apache/hadoop/hbase/favored/TestFavoredNodeAssignmentHelper.java

Lines changed: 6 additions & 7 deletions

@@ -366,13 +366,12 @@ private void checkNumRegions(int regionCount, int firstRackSize, int secondRackS
     regionMap.put(regionsOnRack1, 1);
     regionMap.put(regionsOnRack2, 2);
     regionMap.put(regionsOnRack3, 3);
-    assertTrue(printProportions(firstRackSize, secondRackSize, thirdRackSize, regionsOnRack1,
-      regionsOnRack2, regionsOnRack3), rackMap.get(firstRackSize) == regionMap.get(regionsOnRack1));
-    assertTrue(printProportions(firstRackSize, secondRackSize, thirdRackSize, regionsOnRack1,
-      regionsOnRack2, regionsOnRack3),
-      rackMap.get(secondRackSize) == regionMap.get(regionsOnRack2));
-    assertTrue(printProportions(firstRackSize, secondRackSize, thirdRackSize, regionsOnRack1,
-      regionsOnRack2, regionsOnRack3), rackMap.get(thirdRackSize) == regionMap.get(regionsOnRack3));
+    assertEquals(printProportions(firstRackSize, secondRackSize, thirdRackSize, regionsOnRack1,
+      regionsOnRack2, regionsOnRack3), rackMap.get(firstRackSize), regionMap.get(regionsOnRack1));
+    assertEquals(printProportions(firstRackSize, secondRackSize, thirdRackSize, regionsOnRack1,
+      regionsOnRack2, regionsOnRack3), rackMap.get(secondRackSize), regionMap.get(regionsOnRack2));
+    assertEquals(printProportions(firstRackSize, secondRackSize, thirdRackSize, regionsOnRack1,
+      regionsOnRack2, regionsOnRack3), rackMap.get(thirdRackSize), regionMap.get(regionsOnRack3));
   }

   private String printProportions(int firstRackSize, int secondRackSize, int thirdRackSize,
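
The map lookups return boxed Integers, so the old assertTrue(... == ...) compared object identity; assertEquals compares values via equals() and also reports both sides on failure. A tiny illustrative snippet of the underlying pitfall, not HBase code:

    // Boxed Integer comparison semantics behind the change.
    class BoxedCompareSketch {
      public static void main(String[] args) {
        Integer a = Integer.valueOf(1000);
        Integer b = Integer.valueOf(1000);
        System.out.println(a == b);      // typically false: identity check on distinct boxes
        System.out.println(a.equals(b)); // true: value check, which is what assertEquals uses
      }
    }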

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java

Lines changed: 8 additions & 7 deletions

@@ -539,14 +539,15 @@ public void testScanAndConcurrentMajorCompact() throws Exception {
     }
   }

-  /*
-   * @param hri Region
+  /**
+   * Count table.
+   * @param hri Region
    * @param flushIndex At what row we start the flush.
    * @param concurrent if the flush should be concurrent or sync.
    * @return Count of rows found.
    */
   private int count(final Table countTable, final int flushIndex, boolean concurrent)
-    throws IOException {
+    throws Exception {
     LOG.info("Taking out counting scan");
     Scan scan = new Scan();
     for (byte[] qualifier : EXPLICIT_COLS) {
@@ -574,10 +575,10 @@ public void run() {
         }
       }
     };
-    if (concurrent) {
-      t.start(); // concurrently flush.
-    } else {
-      t.run(); // sync flush
+    t.start();
+    if (!concurrent) {
+      // sync flush
+      t.join();
     }
     LOG.info("Continuing on after kicking off background flush");
     justFlushed = true;
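
Thread.run() executes the body inline on the calling thread without starting a new one, while start() spawns the thread and join() waits for it; that distinction is what this change and the reader.run() to start()/join() change in TestBasicWALEntryStream below rely on. A minimal illustrative sketch, not HBase code:

    class RunVsStartSketch {
      public static void main(String[] args) throws InterruptedException {
        Runnable flush = () -> System.out.println("flush on " + Thread.currentThread().getName());

        new Thread(flush).run();      // executes inline on main; no new thread is started

        Thread t = new Thread(flush); // start() spawns a thread, join() waits for it to finish
        t.start();
        t.join();
      }
    }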

hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNonHBaseReplicationEndpoint.java

Lines changed: 4 additions & 7 deletions

@@ -19,8 +19,6 @@

 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -45,6 +43,8 @@
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

+import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;
+
 @Category({ MediumTests.class, ReplicationTests.class })
 public class TestNonHBaseReplicationEndpoint {

@@ -86,11 +86,8 @@ public void test() throws IOException {

     ReplicationPeerConfig peerConfig = ReplicationPeerConfig.newBuilder()
       .setReplicationEndpointImpl(NonHBaseReplicationEndpoint.class.getName())
-      .setReplicateAllUserTables(false).setTableCFsMap(new HashMap<TableName, List<String>>() {
-        {
-          put(tableName, new ArrayList<>());
-        }
-      }).build();
+      .setReplicateAllUserTables(false)
+      .setTableCFsMap(ImmutableMap.of(tableName, new ArrayList<>())).build();

     ADMIN.addReplicationPeer("1", peerConfig);
     loadData(table);

hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestBasicWALEntryStream.java

Lines changed: 4 additions & 2 deletions

@@ -611,7 +611,8 @@ public void testEOFExceptionForRecoveredQueue() throws Exception {
     localLogQueue.enqueueLog(emptyLog, fakeWalGroupId);
     ReplicationSourceWALReader reader = new ReplicationSourceWALReader(fs, conf, localLogQueue, 0,
       getDummyFilter(), source, fakeWalGroupId);
-    reader.run();
+    reader.start();
+    reader.join();
     // ReplicationSourceWALReaderThread#handleEofException method will
     // remove empty log from logQueue.
     assertEquals(0, localLogQueue.getQueueSize(fakeWalGroupId));
@@ -650,7 +651,8 @@ public void testEOFExceptionForRecoveredQueueWithMultipleLogs() throws Exception
       getDummyFilter(), source, fakeWalGroupId);
     assertEquals("Initial log queue size is not correct", 2,
       localLogQueue.getQueueSize(fakeWalGroupId));
-    reader.run();
+    reader.start();
+    reader.join();

     // remove empty log from logQueue.
     assertEquals(0, localLogQueue.getQueueSize(fakeWalGroupId));

pom.xml

Lines changed: 1 addition & 1 deletion

@@ -617,7 +617,7 @@
     -->
     <checkstyle.version>8.29</checkstyle.version>
     <exec.maven.version>1.6.0</exec.maven.version>
-    <error-prone.version>2.10.0</error-prone.version>
+    <error-prone.version>2.16</error-prone.version>
    <jamon.plugin.version>2.4.2</jamon.plugin.version>
    <lifecycle.mapping.version>1.0.0</lifecycle.mapping.version>
    <maven.antrun.version>1.8</maven.antrun.version>
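
The newer error-prone release also carries the BanJNDI check that hbase-build-configuration above now enables at WARN. A hypothetical example, not taken from HBase, of the JNDI lookup pattern that check flags:

    import javax.naming.Context;
    import javax.naming.InitialContext;
    import javax.naming.NamingException;

    class JndiLookupSketch {
      // BanJNDI (WARN here) flags JNDI lookups because Context#lookup can end up
      // deserializing remotely supplied data.
      Object resolve(String name) throws NamingException {
        Context ctx = new InitialContext();
        return ctx.lookup(name);
      }
    }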
