
Commit 48b37a7

HADOOP-18487. Make protobuf 2.5 an optional runtime dependency. (#4996)
Protobuf 2.5 JAR is no longer needed at runtime. The option common.protobuf2.scope defines whether the protobuf 2.5.0 dependency is marked as provided or not.

* New package org.apache.hadoop.ipc.internal for internal-only protobuf classes, with a ShadedProtobufHelper in there which has shaded protobuf references only, so is guaranteed not to need protobuf-2.5 on the classpath.
* All uses of org.apache.hadoop.ipc.ProtobufHelper have been replaced by uses of org.apache.hadoop.ipc.internal.ShadedProtobufHelper.
* The scope of protobuf-2.5 is set by the option common.protobuf2.scope; in this patch it is still "compile".
* There is an explicit reference to it in modules where it may be needed.
* The maven scope of the dependency can be set with the common.protobuf2.scope option. It can be set to "provided" in a build: -Dcommon.protobuf2.scope=provided
* A new ipc(callable) method catches and converts shaded protobuf exceptions raised during invocation of the supplied lambda expression.
* This is adopted in the code where the migration is not traumatically over-complex. RouterAdminProtocolTranslatorPB is left alone for this reason.

Contributed by Steve Loughran
1 parent 78fc23e commit 48b37a7

53 files changed: +1261 additions, -1626 deletions


BUILDING.txt

Lines changed: 24 additions & 0 deletions
@@ -293,6 +293,30 @@ Maven build goals:
  package. This option requires that -Dpmdk.lib is specified. With -Dbundle.pmdk provided,
  the build will fail if -Dpmdk.lib is not specified.
 
+Controlling the redistribution of the protobuf-2.5 dependency
+
+ The protobuf 2.5.0 library is used at compile time to compile the class
+ org.apache.hadoop.ipc.ProtobufHelper; this class known to have been used by
+ external projects in the past. Protobuf 2.5 is not used elsewhere in
+ the Hadoop codebase; alongside the move to Protobuf 3.x a private successor
+ class, org.apache.hadoop.ipc.internal.ShadedProtobufHelper is now used.
+
+ The hadoop-common JAR still declares a dependency on protobuf-2.5, but this
+ is likely to change in the future. The maven scope of the dependency can be
+ set with the common.protobuf2.scope option.
+ It can be set to "provided" in a build:
+     -Dcommon.protobuf2.scope=provided
+ If this is done then protobuf-2.5.0.jar will no longer be exported as a dependency,
+ and will then be omitted from the share/hadoop/common/lib/ directory of
+ any Hadoop distribution built. Any application declaring a dependency on hadoop-commmon
+ will no longer get the dependency; if they need it then they must explicitly declare it:
+
+   <dependency>
+     <groupId>com.google.protobuf</groupId>
+     <artifactId>protobuf-java</artifactId>
+     <version>2.5.0</version>
+   </dependency>
+
 ----------------------------------------------------------------------------------
 Building components separately

hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml

Lines changed: 1 addition & 2 deletions
@@ -451,8 +451,7 @@
   </Match>
 
   <Match>
-    <Class name="org.apache.hadoop.ipc.ProtobufHelper" />
-    <Method name="getFixedByteString" />
+    <Class name="org.apache.hadoop.ipc.internal.ShadedProtobufHelper" />
     <Bug pattern="AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION" />
   </Match>
 </FindBugsFilter>

hadoop-common-project/hadoop-common/pom.xml

Lines changed: 3 additions & 2 deletions
@@ -250,10 +250,11 @@
       <artifactId>re2j</artifactId>
       <scope>compile</scope>
     </dependency>
+    <!-- Needed for compilation, though no longer in production. -->
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <scope>compile</scope>
+      <scope>${common.protobuf2.scope}</scope>
     </dependency>
     <dependency>
       <groupId>com.google.code.gson</groupId>
@@ -484,11 +485,11 @@
             <!--These classes have direct Protobuf references for backward compatibility reasons-->
             <excludes>
               <exclude>**/ProtobufHelper.java</exclude>
-              <exclude>**/RpcWritable.java</exclude>
               <exclude>**/ProtobufRpcEngineCallback.java</exclude>
               <exclude>**/ProtobufRpcEngine.java</exclude>
               <exclude>**/ProtobufRpcEngine2.java</exclude>
               <exclude>**/ProtobufRpcEngineProtos.java</exclude>
+              <exclude>**/ProtobufWrapperLegacy.java</exclude>
             </excludes>
           </configuration>
         </execution>
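A practical consequence of the ${common.protobuf2.scope} switch above shows up only in downstream builds: hadoop-common still compiles against protobuf 2.5, but consumers stop inheriting the JAR once the scope is set to "provided". As a rough, hypothetical illustration (the class below is not part of this commit, and it assumes the backward-compatibility getRemoteException overload retained in ProtobufHelper really does take the unshaded com.google.protobuf.ServiceException, as its javadoc in the diff further down suggests), this is the kind of legacy caller that would then have to declare protobuf-java 2.5.0 itself:

// Hypothetical downstream code; LegacyProtobufClient is not part of this commit.
// It calls the deprecated, unshaded-protobuf overload kept in
// org.apache.hadoop.ipc.ProtobufHelper, so once hadoop-common stops exporting
// protobuf-2.5 (-Dcommon.protobuf2.scope=provided) this project must add
// com.google.protobuf:protobuf-java:2.5.0 to its own dependency list.
import java.io.IOException;

import com.google.protobuf.ServiceException;   // unshaded protobuf 2.5 class
import org.apache.hadoop.ipc.ProtobufHelper;   // deprecated by this change

public class LegacyProtobufClient {
  /** Unwrap the IOException carried by an unshaded ServiceException. */
  public static IOException unwrap(ServiceException se) {
    return ProtobufHelper.getRemoteException(se);
  }
}

Code that only ever handled the shaded org.apache.hadoop.thirdparty.protobuf.ServiceException should be unaffected and needs no extra dependency.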

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java

Lines changed: 15 additions & 37 deletions
@@ -37,14 +37,13 @@
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToObserverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.UserGroupInformation;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 /**
  * This class is the client side translator to translate the requests made on
@@ -84,60 +83,39 @@ public HAServiceProtocolClientSideTranslatorPB(
 
   @Override
   public void monitorHealth() throws IOException {
-    try {
-      rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ));
   }
 
   @Override
   public void transitionToActive(StateChangeRequestInfo reqInfo) throws IOException {
-    try {
-      TransitionToActiveRequestProto req =
-          TransitionToActiveRequestProto.newBuilder()
+    TransitionToActiveRequestProto req =
+        TransitionToActiveRequestProto.newBuilder()
             .setReqInfo(convert(reqInfo)).build();
-
-      rpcProxy.transitionToActive(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.transitionToActive(NULL_CONTROLLER, req));
   }
 
   @Override
   public void transitionToStandby(StateChangeRequestInfo reqInfo) throws IOException {
-    try {
-      TransitionToStandbyRequestProto req =
+    TransitionToStandbyRequestProto req =
         TransitionToStandbyRequestProto.newBuilder()
-          .setReqInfo(convert(reqInfo)).build();
-      rpcProxy.transitionToStandby(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+            .setReqInfo(convert(reqInfo)).build();
+    ipc(() -> rpcProxy.transitionToStandby(NULL_CONTROLLER, req));
   }
 
   @Override
   public void transitionToObserver(StateChangeRequestInfo reqInfo)
       throws IOException {
-    try {
-      TransitionToObserverRequestProto req =
-          TransitionToObserverRequestProto.newBuilder()
-          .setReqInfo(convert(reqInfo)).build();
-      rpcProxy.transitionToObserver(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    TransitionToObserverRequestProto req =
+        TransitionToObserverRequestProto.newBuilder()
+            .setReqInfo(convert(reqInfo)).build();
+    ipc(() -> rpcProxy.transitionToObserver(NULL_CONTROLLER, req));
   }
 
   @Override
   public HAServiceStatus getServiceStatus() throws IOException {
     GetServiceStatusResponseProto status;
-    try {
-      status = rpcProxy.getServiceStatus(NULL_CONTROLLER,
-          GET_SERVICE_STATUS_REQ);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    status = ipc(() -> rpcProxy.getServiceStatus(NULL_CONTROLLER,
+        GET_SERVICE_STATUS_REQ));
 
     HAServiceStatus ret = new HAServiceStatus(
         convert(status.getState()));
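The commit view does not show ShadedProtobufHelper itself, so the exact signature of the new ipc() helper is not visible here. Judging from the call sites above, both plain calls such as monitorHealth and value-returning calls such as getServiceStatus, it evidently wraps a lambda that may throw the shaded ServiceException and rethrows the wrapped IOException, much as the deleted getRemoteException blocks did. A minimal sketch of that pattern, with the class and functional-interface names (ShadedProtobufHelperSketch, IpcCall) chosen purely for illustration:

import java.io.IOException;

import org.apache.hadoop.thirdparty.protobuf.ServiceException;

/** Sketch only: the real helper lives in org.apache.hadoop.ipc.internal. */
public final class ShadedProtobufHelperSketch {

  /** Lambda whose body may throw the shaded ServiceException. */
  @FunctionalInterface
  public interface IpcCall<T> {
    T call() throws ServiceException;
  }

  /**
   * Invoke the call and convert a shaded ServiceException into the
   * IOException it wraps, or a new IOException if there is no IO cause.
   */
  public static <T> T ipc(IpcCall<T> call) throws IOException {
    try {
      return call.call();
    } catch (ServiceException e) {
      Throwable cause = e.getCause();
      throw (cause instanceof IOException)
          ? (IOException) cause
          : new IOException(e);
    }
  }

  private ShadedProtobufHelperSketch() {
  }
}

With a helper of this shape, ipc(() -> rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ)) simply discards the returned response proto, while status = ipc(() -> rpcProxy.getServiceStatus(NULL_CONTROLLER, GET_SERVICE_STATUS_REQ)) keeps it, which matches how the translator methods above use it.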

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java

Lines changed: 8 additions & 17 deletions
@@ -27,15 +27,14 @@
 import org.apache.hadoop.ha.ZKFCProtocol;
 import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto;
 import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
-
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
 
 
 public class ZKFCProtocolClientSideTranslatorPB implements
@@ -57,24 +56,16 @@ public ZKFCProtocolClientSideTranslatorPB(
   @Override
   public void cedeActive(int millisToCede) throws IOException,
       AccessControlException {
-    try {
-      CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
-          .setMillisToCede(millisToCede)
-          .build();
-      rpcProxy.cedeActive(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
+        .setMillisToCede(millisToCede)
+        .build();
+    ipc(() -> rpcProxy.cedeActive(NULL_CONTROLLER, req));
   }
 
   @Override
   public void gracefulFailover() throws IOException, AccessControlException {
-    try {
-      rpcProxy.gracefulFailover(NULL_CONTROLLER,
-          GracefulFailoverRequestProto.getDefaultInstance());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.gracefulFailover(NULL_CONTROLLER,
+        GracefulFailoverRequestProto.getDefaultInstance()));
   }

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java

Lines changed: 40 additions & 44 deletions
@@ -18,10 +18,10 @@
 package org.apache.hadoop.ipc;
 
 import java.io.IOException;
-import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -30,31 +30,37 @@
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 
 /**
- * Helper methods for protobuf related RPC implementation
+ * Helper methods for protobuf related RPC implementation.
+ * This is deprecated because it references protobuf 2.5 classes
+ * as well as the shaded ones -and so needs an unshaded protobuf-2.5
+ * JAR on the classpath during execution.
+ * It MUST NOT be used internally; it is retained in case existing,
+ * external applications already use it.
+ * @deprecated hadoop code MUST use {@link ShadedProtobufHelper}.
  */
 @InterfaceAudience.Private
+@Deprecated
 public class ProtobufHelper {
+
   private ProtobufHelper() {
     // Hidden constructor for class with only static helper methods
   }
 
   /**
-   * Return the IOException thrown by the remote server wrapped in 
+   * Return the IOException thrown by the remote server wrapped in
    * ServiceException as cause.
    * @param se ServiceException that wraps IO exception thrown by the server
   * @return Exception wrapped in ServiceException or
    *         a new IOException that wraps the unexpected ServiceException.
    */
   public static IOException getRemoteException(ServiceException se) {
-    Throwable e = se.getCause();
-    if (e == null) {
-      return new IOException(se);
-    }
-    return e instanceof IOException ? (IOException) e : new IOException(se);
+    return ShadedProtobufHelper.getRemoteException(se);
   }
 
   /**
-   * Kept for backward compatible.
+   * Extract the remote exception from an unshaded version of the protobuf
+   * libraries.
+   * Kept for backward compatibility.
    * Return the IOException thrown by the remote server wrapped in
    * ServiceException as cause.
    * @param se ServiceException that wraps IO exception thrown by the server
@@ -71,29 +77,13 @@ public static IOException getRemoteException(
     return e instanceof IOException ? (IOException) e : new IOException(se);
   }
 
-  /**
-   * Map used to cache fixed strings to ByteStrings. Since there is no
-   * automatic expiration policy, only use this for strings from a fixed, small
-   * set.
-   * <p/>
-   * This map should not be accessed directly. Used the getFixedByteString
-   * methods instead.
-   */
-  private final static ConcurrentHashMap<Object, ByteString>
-      FIXED_BYTESTRING_CACHE = new ConcurrentHashMap<>();
-
   /**
    * Get the ByteString for frequently used fixed and small set strings.
   * @param key string
   * @return the ByteString for frequently used fixed and small set strings.
   */
   public static ByteString getFixedByteString(Text key) {
-    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
-    if (value == null) {
-      value = ByteString.copyFromUtf8(key.toString());
-      FIXED_BYTESTRING_CACHE.put(new Text(key.copyBytes()), value);
-    }
-    return value;
+    return ShadedProtobufHelper.getFixedByteString(key);
   }
 
   /**
@@ -102,34 +92,40 @@ public static ByteString getFixedByteString(Text key) {
   * @return ByteString for frequently used fixed and small set strings.
   */
   public static ByteString getFixedByteString(String key) {
-    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
-    if (value == null) {
-      value = ByteString.copyFromUtf8(key);
-      FIXED_BYTESTRING_CACHE.put(key, value);
-    }
-    return value;
+    return ShadedProtobufHelper.getFixedByteString(key);
   }
 
+  /**
+   * Get the byte string of a non-null byte array.
+   * If the array is 0 bytes long, return a singleton to reduce object allocation.
+   * @param bytes bytes to convert.
+   * @return a value
+   */
   public static ByteString getByteString(byte[] bytes) {
     // return singleton to reduce object allocation
-    return (bytes.length == 0) ? ByteString.EMPTY : ByteString.copyFrom(bytes);
+    return ShadedProtobufHelper.getByteString(bytes);
  }
 
+  /**
+   * Get a token from a TokenProto payload.
+   * @param tokenProto marshalled token
+   * @return the token.
+   */
   public static Token<? extends TokenIdentifier> tokenFromProto(
       TokenProto tokenProto) {
-    Token<? extends TokenIdentifier> token = new Token<>(
-        tokenProto.getIdentifier().toByteArray(),
-        tokenProto.getPassword().toByteArray(), new Text(tokenProto.getKind()),
-        new Text(tokenProto.getService()));
-    return token;
+    return ShadedProtobufHelper.tokenFromProto(tokenProto);
   }
 
+  /**
+   * Create a {@code TokenProto} instance
+   * from a hadoop token.
+   * This builds and caches the fields
+   * (identifier, password, kind, service) but not
+   * renewer or any payload.
+   * @param tok token
+   * @return a marshallable protobuf class.
+   */
   public static TokenProto protoFromToken(Token<?> tok) {
-    TokenProto.Builder builder = TokenProto.newBuilder().
-        setIdentifier(getByteString(tok.getIdentifier())).
-        setPassword(getByteString(tok.getPassword())).
-        setKindBytes(getFixedByteString(tok.getKind())).
-        setServiceBytes(getFixedByteString(tok.getService()));
-    return builder.build();
+    return ShadedProtobufHelper.protoFromToken(tok);
   }
 }
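ProtobufHelper keeps its public signatures but now forwards everything to ShadedProtobufHelper, so the ConcurrentHashMap-based ByteString cache deleted above presumably lives on in that internal class, built on the shaded org.apache.hadoop.thirdparty.protobuf.ByteString; the findbugs exclusion for AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION moving to ShadedProtobufHelper points the same way. A sketch of that caching logic, reconstructed from the deleted lines rather than copied from the new class:

import java.util.concurrent.ConcurrentHashMap;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.thirdparty.protobuf.ByteString;

/** Sketch of the fixed-string cache assumed to have moved into ShadedProtobufHelper. */
public final class FixedByteStringCacheSketch {

  /**
   * Cache of fixed strings to shaded ByteStrings. There is no eviction,
   * so it is only suitable for a small, fixed set of keys such as token
   * kinds and service names.
   */
  private static final ConcurrentHashMap<Object, ByteString>
      FIXED_BYTESTRING_CACHE = new ConcurrentHashMap<>();

  /** Look up, or compute and cache, the ByteString for a Text key. */
  public static ByteString getFixedByteString(Text key) {
    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
    if (value == null) {
      value = ByteString.copyFromUtf8(key.toString());
      // Store a defensive copy of the key: the caller may reuse its Text object.
      FIXED_BYTESTRING_CACHE.put(new Text(key.copyBytes()), value);
    }
    return value;
  }

  private FixedByteStringCacheSketch() {
  }
}

The unsynchronized get-then-put is tolerable because recomputing a ByteString for the same key is harmless, which is presumably why the findbugs filter suppresses the warning instead of adding locking.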
