diff --git a/BUILDING.txt b/BUILDING.txt index 640ee069e56f1..6f33a60f41103 100644 --- a/BUILDING.txt +++ b/BUILDING.txt @@ -6,7 +6,7 @@ Requirements: * Unix System * JDK 1.8 * Maven 3.3 or later -* ProtocolBuffer 2.5.0 +* ProtocolBuffer 3.7.1 * CMake 3.1 or newer (if compiling native code) * Zlib devel (if compiling native code) * Cyrus SASL devel (if compiling native code) @@ -62,8 +62,16 @@ Installing required packages for clean install of Ubuntu 14.04 LTS Desktop: $ sudo apt-get -y install maven * Native libraries $ sudo apt-get -y install build-essential autoconf automake libtool cmake zlib1g-dev pkg-config libssl-dev libsasl2-dev -* ProtocolBuffer 2.5.0 (required) - $ sudo apt-get -y install protobuf-compiler +* ProtocolBuffer 3.7.1 (required) + $ mkdir -p /opt/protobuf-3.7-src \ + && curl -L -s -S \ + https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/protobuf-java-3.7.1.tar.gz \ + -o /opt/protobuf-3.7.1.tar.gz \ + && tar xzf /opt/protobuf-3.7.1.tar.gz --strip-components 1 -C /opt/protobuf-3.7-src \ + && cd /opt/protobuf-3.7-src \ + && ./configure \ + && make install \ + && rm -rf /opt/protobuf-3.7-src Optional packages: @@ -397,11 +405,10 @@ Installing required dependencies for clean install of macOS 10.14: * Install native libraries, only openssl is required to compile native code, you may optionally install zlib, lz4, etc. $ brew install openssl -* Protocol Buffers 2.5.0 (required), since 2.5.0 is no longer in Homebrew, -we need to compile it from source - $ wget https://github.com/protocolbuffers/protobuf/releases/download/v2.5.0/protobuf-2.5.0.tar.gz - $ tar zxvf protobuf-2.5.0.tar.gz - $ cd protobuf-2.5.0 +* Protocol Buffers 3.7.1 (required) + $ wget https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/protobuf-java-3.7.1.tar.gz + $ mkdir -p protobuf-3.7 && tar zxvf protobuf-java-3.7.1.tar.gz --strip-components 1 -C protobuf-3.7 + $ cd protobuf-3.7 $ ./configure $ make $ make check @@ -432,7 +439,7 @@ Requirements: * Windows System * JDK 1.8 * Maven 3.0 or later -* ProtocolBuffer 2.5.0 +* ProtocolBuffer 3.7.1 * CMake 3.1 or newer * Visual Studio 2010 Professional or Higher * Windows SDK 8.1 (if building CPU rate control for the container executor) diff --git a/hadoop-client-modules/hadoop-client-runtime/pom.xml b/hadoop-client-modules/hadoop-client-runtime/pom.xml index cb1a2f970c768..565e9682d5194 100644 --- a/hadoop-client-modules/hadoop-client-runtime/pom.xml +++ b/hadoop-client-modules/hadoop-client-runtime/pom.xml @@ -229,6 +229,13 @@ update* + <filter> + <artifact>com.google.protobuf:protobuf-java</artifact> + <excludes> + <exclude>google/protobuf/*.proto</exclude> + <exclude>google/protobuf/**/*.proto</exclude> + </excludes> + </filter> diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java index 36e280f39990b..f1142d35e72c2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java @@ -66,7 +66,7 @@ public String getClassName() { * @return may be null if the code was newer than our protobuf definitions or none was given.
*/ public RpcErrorCodeProto getErrorCode() { - return RpcErrorCodeProto.valueOf(errorCode); + return RpcErrorCodeProto.forNumber(errorCode); } /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java index 54fb98e80d858..a97af87bdfb01 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java @@ -106,7 +106,7 @@ Message getMessage() { @Override void writeTo(ResponseBuffer out) throws IOException { int length = message.getSerializedSize(); - length += CodedOutputStream.computeRawVarint32Size(length); + length += CodedOutputStream.computeUInt32SizeNoTag(length); out.ensureCapacity(length); message.writeDelimitedTo(out); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index df19de293f79b..36785e147d757 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -3274,10 +3274,10 @@ private byte[] setupResponseForProtobuf( cos.writeRawByte((byte)((length >>> 16) & 0xFF)); cos.writeRawByte((byte)((length >>> 8) & 0xFF)); cos.writeRawByte((byte)((length >>> 0) & 0xFF)); - cos.writeRawVarint32(header.getSerializedSize()); + cos.writeUInt32NoTag(header.getSerializedSize()); header.writeTo(cos); if (payload != null) { - cos.writeRawVarint32(payload.getSerializedSize()); + cos.writeUInt32NoTag(payload.getSerializedSize()); payload.writeTo(cos); } return buf; @@ -3285,7 +3285,7 @@ private byte[] setupResponseForProtobuf( private static int getDelimitedLength(Message message) { int length = message.getSerializedSize(); - return length + CodedOutputStream.computeRawVarint32Size(length); + return length + CodedOutputStream.computeUInt32SizeNoTag(length); } /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestProtoUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestProtoUtil.java index ab891b8f200d6..6b72089faab84 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestProtoUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestProtoUtil.java @@ -69,7 +69,7 @@ public void testVarInt() throws IOException { private void doVarIntTest(int value) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); CodedOutputStream cout = CodedOutputStream.newInstance(baos); - cout.writeRawVarint32(value); + cout.writeUInt32NoTag(value); cout.flush(); DataInputStream dis = new DataInputStream( diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java index be822d664f8e7..a55125f0722b4 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java @@ -75,7 +75,7 @@ private enum StatusFormat { } static Status getStatus(int header) { - return 
Status.valueOf((int) STATUS.BITS.retrieve(header)); + return Status.forNumber((int) STATUS.BITS.retrieve(header)); } static ECN getECN(int header) { diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java index 1b2703755c618..691ac54ff29ee 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java @@ -293,7 +293,7 @@ public static DataChecksum.Type convert(HdfsProtos.ChecksumTypeProto type) { } public static HdfsProtos.ChecksumTypeProto convert(DataChecksum.Type type) { - return HdfsProtos.ChecksumTypeProto.valueOf(type.id); + return HdfsProtos.ChecksumTypeProto.forNumber(type.id); } public static HdfsProtos.BlockChecksumTypeProto convert( @@ -1115,7 +1115,7 @@ public static BlockStoragePolicy convert(BlockStoragePolicyProto proto) { } public static FsActionProto convert(FsAction v) { - return FsActionProto.valueOf(v != null ? v.ordinal() : 0); + return FsActionProto.forNumber(v != null ? v.ordinal() : 0); } public static XAttrProto convertXAttrProto(XAttr a) { @@ -1157,7 +1157,7 @@ public static List<XAttr> convertXAttrs(List<XAttrProto> xAttrSpec) { } static XAttrNamespaceProto convert(XAttr.NameSpace v) { - return XAttrNamespaceProto.valueOf(v.ordinal()); + return XAttrNamespaceProto.forNumber(v.ordinal()); } static XAttr.NameSpace convert(XAttrNamespaceProto v) { @@ -1249,7 +1249,7 @@ public static List<AclEntry> convertAclEntry(List<AclEntryProto> aclSpec) { } static AclEntryScopeProto convert(AclEntryScope v) { - return AclEntryScopeProto.valueOf(v.ordinal()); + return AclEntryScopeProto.forNumber(v.ordinal()); } private static AclEntryScope convert(AclEntryScopeProto v) { @@ -1257,7 +1257,7 @@ private static AclEntryScope convert(AclEntryScopeProto v) { } static AclEntryTypeProto convert(AclEntryType e) { - return AclEntryTypeProto.valueOf(e.ordinal()); + return AclEntryTypeProto.forNumber(e.ordinal()); } private static AclEntryType convert(AclEntryTypeProto v) { @@ -3220,7 +3220,7 @@ public static ErasureCodingPolicyState convertECState( public static HdfsProtos.ErasureCodingPolicyState convertECState( ErasureCodingPolicyState state) { - return HdfsProtos.ErasureCodingPolicyState.valueOf(state.getValue()); + return HdfsProtos.ErasureCodingPolicyState.forNumber(state.getValue()); } /** @@ -3356,7 +3356,7 @@ public static List<AddBlockFlagProto> convertAddBlockFlags( EnumSet<AddBlockFlag> flags) { List<AddBlockFlagProto> ret = new ArrayList<>(); for (AddBlockFlag flag : flags) { - AddBlockFlagProto abfp = AddBlockFlagProto.valueOf(flag.getMode()); + AddBlockFlagProto abfp = AddBlockFlagProto.forNumber(flag.getMode()); if (abfp != null) { ret.add(abfp); } @@ -3409,7 +3409,8 @@ public static List<OpenFilesTypeProto> convertOpenFileTypes( EnumSet<OpenFilesType> types) { List<OpenFilesTypeProto> typeProtos = new ArrayList<>(); for (OpenFilesType type : types) { - OpenFilesTypeProto typeProto = OpenFilesTypeProto.valueOf(type.getMode()); + OpenFilesTypeProto typeProto = OpenFilesTypeProto + .forNumber(type.getMode()); if (typeProto != null) { typeProtos.add(typeProto); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java index
baad11352f8a2..31ab9daed3006 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java @@ -22,7 +22,7 @@ import org.apache.commons.codec.binary.Base64; -import com.google.protobuf.GeneratedMessage; +import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.Message; import com.google.protobuf.Message.Builder; import com.google.protobuf.MessageOrBuilder; @@ -31,7 +31,7 @@ * Helper class for setting/getting data elements in an object backed by a * protobuf implementation. */ -public class FederationProtocolPBTranslator<P extends GeneratedMessage, +public class FederationProtocolPBTranslator<P extends GeneratedMessageV3, B extends Builder, T extends MessageOrBuilder>
{ /** Optional proto byte stream used to create this object. */ diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java index 77e40b4389d8c..1f17ee2201ffd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java @@ -276,12 +276,12 @@ public void add(Replica replica) { try { // zig-zag to reduce size of legacy blocks cos.writeSInt64NoTag(replica.getBlockId()); - cos.writeRawVarint64(replica.getBytesOnDisk()); - cos.writeRawVarint64(replica.getGenerationStamp()); + cos.writeUInt64NoTag(replica.getBytesOnDisk()); + cos.writeUInt64NoTag(replica.getGenerationStamp()); ReplicaState state = replica.getState(); // although state is not a 64-bit value, using a long varint to // allow for future use of the upper bits - cos.writeRawVarint64(state.getValue()); + cos.writeUInt64NoTag(state.getValue()); if (state == ReplicaState.FINALIZED) { numFinalized++; } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java index 51379b8657392..5950fa61d85b7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java @@ -1015,8 +1015,9 @@ private SectionName(String name) { } } - private static int getOndiskTrunkSize(com.google.protobuf.GeneratedMessage s) { - return CodedOutputStream.computeRawVarint32Size(s.getSerializedSize()) + private static int getOndiskTrunkSize( + com.google.protobuf.GeneratedMessageV3 s) { + return CodedOutputStream.computeUInt32SizeNoTag(s.getSerializedSize()) + s.getSerializedSize(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java index cec44f51fe046..e84dec598aa6a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java @@ -433,7 +433,7 @@ private void dumpXattrs(INodeSection.XAttrFeatureProto xattrs) { int ns = (XATTR_NAMESPACE_MASK & (encodedName >> XATTR_NAMESPACE_OFFSET)) | ((XATTR_NAMESPACE_EXT_MASK & (encodedName >> XATTR_NAMESPACE_EXT_OFFSET)) << 2); o(INODE_SECTION_NS, XAttrProtos.XAttrProto. 
- XAttrNamespaceProto.valueOf(ns).toString()); + XAttrNamespaceProto.forNumber(ns).toString()); o(SECTION_NAME, SerialNumberManager.XATTR.getString( XATTR_NAME_MASK & (encodedName >> XATTR_NAME_OFFSET), stringTable)); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java index 22f84c5395946..7b635382574b0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java @@ -910,7 +910,7 @@ public void testFSServerDefaultsHelper() { b.setFileBufferSize(DFSConfigKeys.IO_FILE_BUFFER_SIZE_DEFAULT); b.setEncryptDataTransfer(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_DEFAULT); b.setTrashInterval(DFSConfigKeys.FS_TRASH_INTERVAL_DEFAULT); - b.setChecksumType(HdfsProtos.ChecksumTypeProto.valueOf( + b.setChecksumType(HdfsProtos.ChecksumTypeProto.forNumber( DataChecksum.Type.valueOf(DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT).id)); HdfsProtos.FsServerDefaultsProto proto = b.build(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java index f5a68925c3a6a..99dc783c86a35 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java @@ -75,6 +75,9 @@ public void tearDown() { @Test public void testBlockReportExceedsLengthLimit() throws Exception { + // protobuf's default message size limit grew from 64 MB to 2 GB in 3.x, + // so protobuf itself no longer rejects an oversized block report. + conf.setInt(IPC_MAXIMUM_DATA_LENGTH, IPC_MAXIMUM_DATA_LENGTH_DEFAULT / 2); initCluster(); // Create a large enough report that we expect it will go beyond the RPC // server's length validation, and also protobuf length validation.
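A note on the TestLargeBlockReport hunk above: protobuf-java 2.5 capped parsed messages at 64 MB by default and threw InvalidProtocolBufferException beyond that, while protobuf-java 3.x raised that default to 2 GB. With protobuf no longer rejecting the oversized report, the test halves ipc.maximum.data.length so that the RPC server's own length guard is the check that fires. The sketch below illustrates that style of guard, assuming only protobuf-java 3.7 on the classpath; the class and method names are illustrative, not Hadoop code:

    import java.io.IOException;
    import java.io.InputStream;

    import com.google.protobuf.CodedInputStream;

    /** Hypothetical sketch: pre-check a delimited message's varint length
        prefix against a configured cap before any protobuf parsing runs. */
    final class DelimitedLengthGuard {
      private DelimitedLengthGuard() {
      }

      static int readCheckedLength(InputStream in, int maxDataLength)
          throws IOException {
        CodedInputStream cis = CodedInputStream.newInstance(in);
        int length = cis.readRawVarint32(); // still public in protobuf 3.x
        if (length < 0 || length > maxDataLength) {
          throw new IOException("Requested data length " + length
              + " exceeds configured maximum " + maxDataLength);
        }
        return length;
      }
    }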
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 0aa84dcdb179d..9f85148fbf91b 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -84,7 +84,7 @@ - <protobuf.version>2.5.0</protobuf.version> + <protobuf.version>3.7.1</protobuf.version> <protoc.path>${env.HADOOP_PROTOC_PATH}</protoc.path> <zookeeper.version>3.4.13</zookeeper.version> @@ -1918,6 +1918,9 @@ <activeByDefault>false</activeByDefault> + <properties> + <protoc.path>/opt/protobuf-3.7/bin/protoc</protoc.path> + </properties> diff --git a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java index d4a632cf06dcf..a499c8ac000b0 100644 --- a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java +++ b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java @@ -266,8 +266,8 @@ synchronized void writeDirEntry(DirEntry e) throws IOException { e.writeDelimitedTo(dirs); } - private static int getOndiskSize(com.google.protobuf.GeneratedMessage s) { - return CodedOutputStream.computeRawVarint32Size(s.getSerializedSize()) + private static int getOndiskSize(com.google.protobuf.GeneratedMessageV3 s) { + return CodedOutputStream.computeUInt32SizeNoTag(s.getSerializedSize()) + s.getSerializedSize(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml index 21ace7d4638c7..91b5d8d22c390 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml @@ -55,6 +55,7 @@ <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-annotations</artifactId> + <groupId>com.google.protobuf</groupId> <artifactId>protobuf-java</artifactId> diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/pb/PlacementConstraintToProtoConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/pb/PlacementConstraintToProtoConverter.java index 30f774136dcfe..caf254a2b3ef4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/pb/PlacementConstraintToProtoConverter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/pb/PlacementConstraintToProtoConverter.java @@ -41,7 +41,7 @@ import org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto; import org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto; -import com.google.protobuf.GeneratedMessage; +import com.google.protobuf.GeneratedMessageV3; /** * {@code PlacementConstraintToProtoConverter} generates a * {@link PlacementConstraintProto} given a {@link PlacementConstraint}. */ @Private public class PlacementConstraintToProtoConverter - implements PlacementConstraint.Visitor<GeneratedMessage> { + implements PlacementConstraint.Visitor<GeneratedMessageV3> { private PlacementConstraint placementConstraint; @@ -65,7 +65,7 @@ public PlacementConstraintProto convert() { } @Override - public GeneratedMessage visit(SingleConstraint constraint) { + public GeneratedMessageV3 visit(SingleConstraint constraint) { SimplePlacementConstraintProto.Builder sb = SimplePlacementConstraintProto.newBuilder(); @@ -94,7 +94,7 @@ public GeneratedMessage visit(SingleConstraint constraint) { } @Override - public GeneratedMessage visit(TargetExpression target) { + public GeneratedMessageV3 visit(TargetExpression target) { PlacementConstraintTargetProto.Builder tb = PlacementConstraintTargetProto.newBuilder(); @@ -109,16 +109,16 @@ public GeneratedMessage visit(TargetExpression target) { } @Override - public GeneratedMessage visit(TargetConstraint constraint) { + public GeneratedMessageV3 visit(TargetConstraint constraint) { throw new
YarnRuntimeException("Unexpected TargetConstraint found."); } @Override - public GeneratedMessage visit(CardinalityConstraint constraint) { + public GeneratedMessageV3 visit(CardinalityConstraint constraint) { throw new YarnRuntimeException("Unexpected CardinalityConstraint found."); } - private GeneratedMessage visitAndOr( + private GeneratedMessageV3 visitAndOr( CompositeConstraint composite, CompositeType type) { CompositePlacementConstraintProto.Builder cb = CompositePlacementConstraintProto.newBuilder(); @@ -137,17 +137,17 @@ private GeneratedMessage visitAndOr( } @Override - public GeneratedMessage visit(And constraint) { + public GeneratedMessageV3 visit(And constraint) { return visitAndOr(constraint, CompositeType.AND); } @Override - public GeneratedMessage visit(Or constraint) { + public GeneratedMessageV3 visit(Or constraint) { return visitAndOr(constraint, CompositeType.OR); } @Override - public GeneratedMessage visit(DelayedOr constraint) { + public GeneratedMessageV3 visit(DelayedOr constraint) { CompositePlacementConstraintProto.Builder cb = CompositePlacementConstraintProto.newBuilder(); @@ -166,7 +166,7 @@ public GeneratedMessage visit(DelayedOr constraint) { } @Override - public GeneratedMessage visit(TimedPlacementConstraint constraint) { + public GeneratedMessageV3 visit(TimedPlacementConstraint constraint) { TimedPlacementConstraintProto.Builder tb = TimedPlacementConstraintProto.newBuilder(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/NodePublishVolumeRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/NodePublishVolumeRequestPBImpl.java index c3590230c5399..356385630091f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/NodePublishVolumeRequestPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/NodePublishVolumeRequestPBImpl.java @@ -154,10 +154,10 @@ public void setVolumeCapability( CsiAdaptorProtos.VolumeCapability vc = CsiAdaptorProtos.VolumeCapability.newBuilder() .setAccessMode(CsiAdaptorProtos.VolumeCapability - .AccessMode.valueOf( + .AccessMode.forNumber( capability.getAccessMode().ordinal())) .setVolumeType(CsiAdaptorProtos.VolumeCapability - .VolumeType.valueOf(capability.getVolumeType().ordinal())) + .VolumeType.forNumber(capability.getVolumeType().ordinal())) .addAllMountFlags(capability.getMountFlags()) .build(); builder.setVolumeCapability(vc); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ValidateVolumeCapabilitiesRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ValidateVolumeCapabilitiesRequestPBImpl.java index 14bd89dc46cd9..bf3f4f55b24f9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ValidateVolumeCapabilitiesRequestPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ValidateVolumeCapabilitiesRequestPBImpl.java @@ -68,9 +68,9 @@ public void addVolumeCapability(VolumeCapability volumeCapability) { CsiAdaptorProtos.VolumeCapability vc = CsiAdaptorProtos.VolumeCapability.newBuilder() 
.setAccessMode(CsiAdaptorProtos.VolumeCapability.AccessMode - .valueOf(volumeCapability.getAccessMode().ordinal())) + .forNumber(volumeCapability.getAccessMode().ordinal())) .setVolumeType(CsiAdaptorProtos.VolumeCapability.VolumeType - .valueOf(volumeCapability.getVolumeType().ordinal())) + .forNumber(volumeCapability.getVolumeType().ordinal())) .addAllMountFlags(volumeCapability.getMountFlags()) .build(); builder.addVolumeCapabilities(vc);
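Taken together, the Java churn in this patch is three mechanical renames forced by protobuf-java 3.x: generated enums deprecate valueOf(int) in favor of forNumber(int), which returns null for a number the local .proto does not define (for example, a value sent by a newer peer); GeneratedMessage becomes GeneratedMessageV3; and CodedOutputStream's writeRawVarint32/computeRawVarint32Size become writeUInt32NoTag/computeUInt32SizeNoTag. A minimal sketch of the enum-lookup pattern, assuming protobuf-java 3.7 and Hadoop's generated RpcHeaderProtos on the classpath (the demo class itself is hypothetical, not part of the patch):

    import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;

    public class ForNumberDemo {
      public static void main(String[] args) {
        // protobuf 2.5 style, now deprecated: RpcErrorCodeProto.valueOf(1)
        RpcErrorCodeProto known = RpcErrorCodeProto.forNumber(1);
        RpcErrorCodeProto unknown = RpcErrorCodeProto.forNumber(9999);
        // forNumber returns null for unknown wire values, so callers such as
        // RemoteException.getErrorCode() must tolerate a null result.
        System.out.println(known + " / " + unknown); // ERROR_APPLICATION / null
      }
    }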