Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

HADOOP-18302. Remove WhiteBox in hadoop-common module. #4457

Merged
merged 29 commits into from
Sep 12, 2022
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
401ae5a
HADOOP-18302. Remove WhiteBox in hadoop-common module.
Jun 18, 2022
9c382a2
HADOOP-18302. Remove WhiteBox in hadoop-common module.
Jun 18, 2022
d101a55
HADOOP-18302. Remove WhiteBox in hadoop-common module.
Jun 18, 2022
5918769
HADOOP-18302. Remove WhiteBox in hadoop-common module.
Jun 18, 2022
24ac36b
Merge branch 'apache:trunk' into HADOOP-18302
slfan1989 Jun 18, 2022
a2f7bdb
Merge branch 'apache:trunk' into HADOOP-18302
slfan1989 Jun 20, 2022
e30456b
HADOOP-18302. Fix CheckStyle.
Jun 20, 2022
185c165
Merge branch 'apache:trunk' into HADOOP-18302
slfan1989 Jul 10, 2022
891427d
HADOOP-18302. Remove WhiteBox in hadoop-common module.
Jul 10, 2022
536c143
HADOOP-18302. Remove WhiteBox in hadoop-common module.
Jul 10, 2022
2625e1d
HADOOP-18302. Remove WhiteBox in hadoop-common module.
Jul 10, 2022
2594eb0
HADOOP-18302. Fix CheckStyle.
Jul 10, 2022
2eab504
HADOOP-18302. Fix CheckStyle.
Jul 16, 2022
89f4e9c
HADOOP-18302. Remove WhiteBox in hadoop-common module.
Jul 16, 2022
3ad32d1
Merge branch 'apache:trunk' into HADOOP-18302
slfan1989 Jul 22, 2022
7964f1f
Merge branch 'apache:trunk' into HADOOP-18302
slfan1989 Jul 28, 2022
f378b1c
Merge branch 'apache:trunk' into HADOOP-18302
slfan1989 Aug 2, 2022
6cf3afd
Merge branch 'apache:trunk' into HADOOP-18302
slfan1989 Aug 10, 2022
23a8edf
Merge branch 'apache:trunk' into HADOOP-18302
slfan1989 Aug 17, 2022
bcbafea
YARN-18302. Fix CheckStyle.
Aug 18, 2022
bcaa569
YARN-18302. Fix CheckStyle.
Aug 18, 2022
fa7bbfb
YARN-18302. Fix CodeStyle.
Aug 18, 2022
d835bfe
Merge branch 'apache:trunk' into HADOOP-18302
slfan1989 Aug 19, 2022
839dc70
HADOOP-18302. Fix CheckStyle.
Aug 19, 2022
57ecde4
Merge branch 'apache:trunk' into HADOOP-18302
slfan1989 Aug 24, 2022
bff5a07
HADOOP-18302. Fix CheckStyle.
Aug 30, 2022
27fd624
HADOOP-18302. Fix CheckStyle.
Aug 30, 2022
7421ba8
Merge branch 'apache:trunk' into HADOOP-18302
slfan1989 Sep 11, 2022
82a3cc9
HADOOP-18302. Fix CheckStyle.
Sep 11, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -17,14 +17,14 @@
*/
package org.apache.hadoop.fs;

import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem.Statistics;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.test.Whitebox;
import org.apache.hadoop.util.StringUtils;

import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
Expand Down Expand Up @@ -650,7 +650,9 @@ public void testFileStatusPipeFile() throws Exception {
RawLocalFileSystem fs = spy(origFs);
Configuration conf = mock(Configuration.class);
fs.setConf(conf);
Whitebox.setInternalState(fs, "useDeprecatedFileStatus", false);

FieldUtils.getField(RawLocalFileSystem.class,
"useDeprecatedFileStatus", true).set(fs, false);
Path path = new Path("/foo");
File pipe = mock(File.class);
when(pipe.isFile()).thenReturn(false);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
*/
package org.apache.hadoop.http;

import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configuration.IntegerRanges;
import org.apache.hadoop.fs.CommonConfigurationKeys;
Expand All @@ -29,7 +30,6 @@
import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.test.Whitebox;

import org.assertj.core.api.Assertions;
import org.eclipse.jetty.server.ServerConnector;
Expand Down Expand Up @@ -663,8 +663,8 @@ private HttpServer2 checkBindAddress(String host, int port, boolean findPort)
HttpServer2 server = createServer(host, port);
try {
// not bound, ephemeral should return requested port (0 for ephemeral)
List<?> listeners = (List<?>) Whitebox.getInternalState(server,
"listeners");
List<?> listeners = (List<?>) FieldUtils.
getField(HttpServer2.class, "listeners", true).get(server);
ServerConnector listener = (ServerConnector)listeners.get(0);

assertEquals(port, listener.getPort());
Expand Down Expand Up @@ -740,8 +740,8 @@ public void testBacklogSize() throws Exception
Configuration conf = new Configuration();
conf.setInt(HttpServer2.HTTP_SOCKET_BACKLOG_SIZE_KEY, backlogSize);
HttpServer2 srv = createServer("test", conf);
List<?> listeners = (List<?>) Whitebox.getInternalState(srv,
"listeners");
List<?> listeners = (List<?>) FieldUtils.
getField(HttpServer2.class, "listeners", true).get(srv);
ServerConnector listener = (ServerConnector)listeners.get(0);
assertEquals(backlogSize, listener.getAcceptQueueSize());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@

import javax.net.SocketFactory;

import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
Expand All @@ -91,7 +92,6 @@
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.test.Whitebox;
import org.apache.hadoop.util.StringUtils;
import org.junit.Assert;
import org.junit.Assume;
Expand Down Expand Up @@ -881,9 +881,9 @@ private void checkBlocking(int readers, int readerQ, int callQ) throws Exception
// start server
final TestServerQueue server =
new TestServerQueue(clients, readers, callQ, handlers, conf);
CallQueueManager<Call> spy = spy(
(CallQueueManager<Call>)Whitebox.getInternalState(server, "callQueue"));
Whitebox.setInternalState(server, "callQueue", spy);
CallQueueManager<Call> spy = spy((CallQueueManager<Call>)
FieldUtils.getField(Server.class, "callQueue", true).get(server));
FieldUtils.getField(Server.class, "callQueue", true).set(server, spy);
final InetSocketAddress addr = NetUtils.getConnectAddress(server);
server.start();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

package org.apache.hadoop.ipc;

import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.hadoop.ipc.metrics.RpcMetrics;

import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
Expand Down Expand Up @@ -50,7 +51,6 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.MetricsAsserts;
import org.apache.hadoop.test.MockitoUtil;
import org.apache.hadoop.test.Whitebox;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
Expand Down Expand Up @@ -299,7 +299,7 @@ public <T> ProtocolProxy<T> getProxy(
throws IOException {
T proxy = (T) Proxy.newProxyInstance(protocol.getClassLoader(),
new Class[] { protocol }, new StoppedInvocationHandler());
return new ProtocolProxy<T>(protocol, proxy, false);
return new ProtocolProxy<>(protocol, proxy, false);
}

@Override
Expand Down Expand Up @@ -1163,9 +1163,9 @@ public void testClientBackOff() throws Exception {
server = setupTestServer(builder);

@SuppressWarnings("unchecked")
aajisaka marked this conversation as resolved.
Show resolved Hide resolved
CallQueueManager<Call> spy = spy((CallQueueManager<Call>) Whitebox
.getInternalState(server, "callQueue"));
Whitebox.setInternalState(server, "callQueue", spy);
CallQueueManager<Call> spy = spy((CallQueueManager<Call>)
FieldUtils.getField(Server.class, "callQueue", true).get(server));
FieldUtils.getField(Server.class, "callQueue", true).set(server, spy);

Exception lastException = null;
proxy = getClient(addr, conf);
Expand Down Expand Up @@ -1217,7 +1217,7 @@ public void testClientBackOffByResponseTime() throws Exception {
GenericTestUtils.setLogLevel(DecayRpcScheduler.LOG, Level.DEBUG);
GenericTestUtils.setLogLevel(RPC.LOG, Level.DEBUG);

final List<Future<Void>> res = new ArrayList<Future<Void>>();
final List<Future<Void>> res = new ArrayList<>();
final ExecutorService executorService =
Executors.newFixedThreadPool(numClients);
conf.setInt(CommonConfigurationKeys.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 0);
Expand All @@ -1226,9 +1226,9 @@ public void testClientBackOffByResponseTime() throws Exception {
Server server = setupDecayRpcSchedulerandTestServer(ns + ".");

@SuppressWarnings("unchecked")
aajisaka marked this conversation as resolved.
Show resolved Hide resolved
CallQueueManager<Call> spy = spy((CallQueueManager<Call>) Whitebox
.getInternalState(server, "callQueue"));
Whitebox.setInternalState(server, "callQueue", spy);
CallQueueManager<Call> spy = spy((CallQueueManager<Call>)
FieldUtils.getField(Server.class, "callQueue", true).get(server));
FieldUtils.getField(Server.class, "callQueue", true).set(server, spy);

Exception lastException = null;
proxy = getClient(addr, conf);
Expand Down Expand Up @@ -1567,11 +1567,11 @@ public RpcStatusProto getRpcStatusProto() {
RPC.Builder builder = newServerBuilder(conf)
.setQueueSizePerHandler(1).setNumHandlers(1).setVerbose(true);
server = setupTestServer(builder);
Whitebox.setInternalState(
server, "rpcRequestClass", FakeRequestClass.class);
FieldUtils.getField(Server.class, "rpcRequestClass", true).
set(server, FakeRequestClass.class);
MutableCounterLong authMetric =
(MutableCounterLong)Whitebox.getInternalState(
server.getRpcMetrics(), "rpcAuthorizationSuccesses");
(MutableCounterLong) FieldUtils.getField(RpcMetrics.class,
"rpcAuthorizationSuccesses", true).get(server.getRpcMetrics());

proxy = getClient(addr, conf);
boolean isDisconnected = true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,11 @@

package org.apache.hadoop.metrics2.impl;

import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.sink.GraphiteSink;
import org.apache.hadoop.test.Whitebox;
import org.junit.Test;
import org.mockito.ArgumentCaptor;

Expand Down Expand Up @@ -58,7 +58,7 @@ private GraphiteSink.Graphite makeGraphite() {
}

@Test
public void testPutMetrics() {
public void testPutMetrics() throws IllegalAccessException {
GraphiteSink sink = new GraphiteSink();
List<MetricsTag> tags = new ArrayList<MetricsTag>();
tags.add(new MetricsTag(MsInfo.Context, "all"));
Expand All @@ -70,7 +70,7 @@ public void testPutMetrics() {

ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
final GraphiteSink.Graphite mockGraphite = makeGraphite();
Whitebox.setInternalState(sink, "graphite", mockGraphite);
FieldUtils.getField(GraphiteSink.class, "graphite", true).set(sink, mockGraphite);
sink.putMetrics(record);

try {
Expand All @@ -84,16 +84,16 @@ public void testPutMetrics() {
assertEquals(true,
result.equals("null.all.Context.Context=all.Hostname=host.foo1 1.25 10\n" +
"null.all.Context.Context=all.Hostname=host.foo2 2.25 10\n") ||
result.equals("null.all.Context.Context=all.Hostname=host.foo2 2.25 10\n" +
result.equals("null.all.Context.Context=all.Hostname=host.foo2 2.25 10\n" +
"null.all.Context.Context=all.Hostname=host.foo1 1.25 10\n"));
}

@Test
public void testPutMetrics2() {
public void testPutMetrics2() throws IllegalAccessException {
GraphiteSink sink = new GraphiteSink();
List<MetricsTag> tags = new ArrayList<MetricsTag>();
tags.add(new MetricsTag(MsInfo.Context, "all"));
tags.add(new MetricsTag(MsInfo.Hostname, null));
tags.add(new MetricsTag(MsInfo.Hostname, null));
Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
metrics.add(makeMetric("foo1", 1));
metrics.add(makeMetric("foo2", 2));
Expand All @@ -102,7 +102,7 @@ public void testPutMetrics2() {

ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
final GraphiteSink.Graphite mockGraphite = makeGraphite();
Whitebox.setInternalState(sink, "graphite", mockGraphite);
FieldUtils.getField(GraphiteSink.class, "graphite", true).set(sink, mockGraphite);
sink.putMetrics(record);

try {
Expand All @@ -124,12 +124,12 @@ public void testPutMetrics2() {
* Assert that timestamps are converted correctly, ticket HADOOP-11182
*/
@Test
public void testPutMetrics3() {
public void testPutMetrics3() throws IllegalAccessException {

// setup GraphiteSink
GraphiteSink sink = new GraphiteSink();
final GraphiteSink.Graphite mockGraphite = makeGraphite();
Whitebox.setInternalState(sink, "graphite", mockGraphite);
FieldUtils.getField(GraphiteSink.class,"graphite",true).set(sink,mockGraphite);

// given two metrics records with timestamps 1000 milliseconds apart.
List<MetricsTag> tags = Collections.emptyList();
Expand Down Expand Up @@ -158,7 +158,7 @@ public void testPutMetrics3() {
}

@Test
public void testFailureAndPutMetrics() throws IOException {
public void testFailureAndPutMetrics() throws IOException, IllegalAccessException {
GraphiteSink sink = new GraphiteSink();
List<MetricsTag> tags = new ArrayList<MetricsTag>();
tags.add(new MetricsTag(MsInfo.Context, "all"));
Expand All @@ -169,7 +169,7 @@ public void testFailureAndPutMetrics() throws IOException {
MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);

final GraphiteSink.Graphite mockGraphite = makeGraphite();
Whitebox.setInternalState(sink, "graphite", mockGraphite);
FieldUtils.getField(GraphiteSink.class, "graphite", true).set(sink, mockGraphite);

// throw exception when first try
doThrow(new IOException("IO exception")).when(mockGraphite).write(anyString());
Expand All @@ -196,20 +196,20 @@ public void testFailureAndPutMetrics() throws IOException {
}

@Test
public void testClose(){
GraphiteSink sink = new GraphiteSink();
final GraphiteSink.Graphite mockGraphite = makeGraphite();
Whitebox.setInternalState(sink, "graphite", mockGraphite);
try {
sink.close();
} catch (IOException ioe) {
ioe.printStackTrace();
}
public void testClose() throws IllegalAccessException {
GraphiteSink sink = new GraphiteSink();
final GraphiteSink.Graphite mockGraphite = makeGraphite();
FieldUtils.getField(GraphiteSink.class,"graphite",true).set(sink,mockGraphite);
try {
sink.close();
} catch (IOException ioe) {
ioe.printStackTrace();
}

try {
verify(mockGraphite).close();
} catch (IOException ioe) {
ioe.printStackTrace();
}
try {
verify(mockGraphite).close();
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,13 +31,13 @@
import java.util.List;
import java.util.Set;

import org.apache.commons.lang3.reflect.FieldUtils;
aajisaka marked this conversation as resolved.
Show resolved Hide resolved
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.sink.StatsDSink;
import org.apache.hadoop.metrics2.sink.StatsDSink.StatsD;
import org.apache.hadoop.test.Whitebox;
import org.junit.Test;

public class TestStatsDMetrics {
Expand All @@ -52,7 +52,7 @@ private AbstractMetric makeMetric(String name, Number value,
}

@Test(timeout=3000)
public void testPutMetrics() throws IOException, InterruptedException {
public void testPutMetrics() throws IOException, IllegalAccessException {
final StatsDSink sink = new StatsDSink();
List<MetricsTag> tags = new ArrayList<MetricsTag>();
tags.add(new MetricsTag(MsInfo.Hostname, "host"));
Expand All @@ -69,7 +69,8 @@ public void testPutMetrics() throws IOException, InterruptedException {
final StatsDSink.StatsD mockStatsD =
new StatsD(sock.getLocalAddress().getHostName(),
sock.getLocalPort());
Whitebox.setInternalState(sink, "statsd", mockStatsD);
FieldUtils.getField(StatsDSink.class, "statsd", true).
set(sink, mockStatsD);
final DatagramPacket p = new DatagramPacket(new byte[8192], 8192);
sink.putMetrics(record);
sock.receive(p);
Expand All @@ -87,7 +88,7 @@ public void testPutMetrics() throws IOException, InterruptedException {
}

@Test(timeout=3000)
public void testPutMetrics2() throws IOException {
public void testPutMetrics2() throws IOException, IllegalAccessException {
StatsDSink sink = new StatsDSink();
List<MetricsTag> tags = new ArrayList<MetricsTag>();
tags.add(new MetricsTag(MsInfo.Hostname, null));
Expand All @@ -104,7 +105,8 @@ public void testPutMetrics2() throws IOException {
final StatsDSink.StatsD mockStatsD =
new StatsD(sock.getLocalAddress().getHostName(),
sock.getLocalPort());
Whitebox.setInternalState(sink, "statsd", mockStatsD);
FieldUtils.getField(StatsDSink.class, "statsd", true).
set(sink, mockStatsD);
final DatagramPacket p = new DatagramPacket(new byte[8192], 8192);
sink.putMetrics(record);
sock.receive(p);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,13 +25,14 @@
import java.net.Socket;
import java.util.Map;

import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
import org.junit.Assert;

import org.apache.hadoop.oncrpc.RpcCall;
import org.apache.hadoop.oncrpc.XDR;
import org.apache.hadoop.oncrpc.security.CredentialsNone;
import org.apache.hadoop.oncrpc.security.VerifierNone;
import org.apache.hadoop.test.Whitebox;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
Expand Down Expand Up @@ -76,7 +77,7 @@ public void testIdle() throws InterruptedException, IOException {
}

@Test(timeout = 10000)
public void testRegistration() throws IOException, InterruptedException {
public void testRegistration() throws IOException, InterruptedException, IllegalAccessException {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think this change is needed.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I will fix it.

XDR req = new XDR();
RpcCall.getInstance(++xid, RpcProgramPortmap.PROGRAM,
RpcProgramPortmap.VERSION,
Expand All @@ -101,8 +102,9 @@ public void testRegistration() throws IOException, InterruptedException {
Thread.sleep(100);
boolean found = false;
@SuppressWarnings("unchecked")
Map<String, PortmapMapping> map = (Map<String, PortmapMapping>) Whitebox
.getInternalState(pm.getHandler(), "map");
Map<String, PortmapMapping> map = (Map<String, PortmapMapping>)
FieldUtils.getField(RpcProgramPortmap.class,
"map", true).get(pm.getHandler());

for (PortmapMapping m : map.values()) {
if (m.getPort() == sent.getPort()
Expand Down