Skip to content

Commit 6f147d5 — commit message: "checkstyle" (1 parent: 6022160)

Browse files

File tree

6 files changed: +25 additions, -22 deletions

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java

Lines changed: 9 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -1040,13 +1040,16 @@ public static BlockECReconstructionInfo convertBlockECReconstructionInfo(
10401040

10411041
byte[] liveBlkIndices = blockEcReconstructionInfoProto.getLiveBlockIndices()
10421042
.toByteArray();
1043-
byte[] excludeReconstructedIndices=blockEcReconstructionInfoProto.getExcludeReconstructedIndices()
1044-
.toByteArray();
1043+
byte[] excludeReconstructedIndices =
1044+
blockEcReconstructionInfoProto.getExcludeReconstructedIndices()
1045+
.toByteArray();
10451046
ErasureCodingPolicy ecPolicy =
10461047
PBHelperClient.convertErasureCodingPolicy(
10471048
blockEcReconstructionInfoProto.getEcPolicy());
1048-
return new BlockECReconstructionInfo(block, sourceDnInfos, targetDnInfos,
1049-
targetStorageUuids, convertStorageTypes, liveBlkIndices, excludeReconstructedIndices, ecPolicy);
1049+
return new BlockECReconstructionInfo(
1050+
block, sourceDnInfos, targetDnInfos,
1051+
targetStorageUuids, convertStorageTypes, liveBlkIndices,
1052+
excludeReconstructedIndices, ecPolicy);
10501053
}
10511054

10521055
public static BlockECReconstructionInfoProto convertBlockECRecoveryInfo(
@@ -1073,7 +1076,8 @@ public static BlockECReconstructionInfoProto convertBlockECRecoveryInfo(
10731076
builder.setLiveBlockIndices(PBHelperClient.getByteString(liveBlockIndices));
10741077

10751078
byte[] excludeReconstructedIndices = blockEcRecoveryInfo.getExcludeReconstructedIndices();
1076-
builder.setExcludeReconstructedIndices(PBHelperClient.getByteString(excludeReconstructedIndices));
1079+
builder.setExcludeReconstructedIndices(
1080+
PBHelperClient.getByteString(excludeReconstructedIndices));
10771081

10781082
builder.setEcPolicy(PBHelperClient.convertErasureCodingPolicy(
10791083
blockEcRecoveryInfo.getErasureCodingPolicy()));

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -2478,7 +2478,7 @@ DatanodeDescriptor[] chooseSourceDatanodes(BlockInfo block,
24782478
List<DatanodeDescriptor> containingNodes,
24792479
List<DatanodeStorageInfo> nodesContainingLiveReplicas,
24802480
NumberReplicas numReplicas, List<Byte> liveBlockIndices,
2481-
List<Byte> liveBusyBlockIndices, List<Byte> ExcludeReconstructed , int priority) {
2481+
List<Byte> liveBusyBlockIndices, List<Byte> excludeReconstructed, int priority) {
24822482
containingNodes.clear();
24832483
nodesContainingLiveReplicas.clear();
24842484
List<DatanodeDescriptor> srcNodes = new ArrayList<>();
@@ -2549,7 +2549,7 @@ DatanodeDescriptor[] chooseSourceDatanodes(BlockInfo block,
25492549
|| state == StoredReplicaState.DECOMMISSIONING)) {
25502550
liveBusyBlockIndices.add(blockIndex);
25512551
//HDFS-16566 ExcludeReconstructed won't be reconstructed
2552-
ExcludeReconstructed.add(blockIndex);
2552+
excludeReconstructed.add(blockIndex);
25532553
}
25542554
continue; // already reached replication limit
25552555
}

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -683,7 +683,7 @@ public void addBlockToBeReplicated(Block block,
683683
*/
684684
void addBlockToBeErasureCoded(ExtendedBlock block,
685685
DatanodeDescriptor[] sources, DatanodeStorageInfo[] targets,
686-
byte[] liveBlockIndices,byte[] excludeReconstrutedIndices, ErasureCodingPolicy ecPolicy) {
686+
byte[] liveBlockIndices, byte[] excludeReconstrutedIndices, ErasureCodingPolicy ecPolicy) {
687687
assert (block != null && sources != null && sources.length > 0);
688688
BlockECReconstructionInfo task = new BlockECReconstructionInfo(block,
689689
sources, targets, liveBlockIndices, excludeReconstrutedIndices, ecPolicy);

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedReconstructionInfo.java

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -61,7 +61,8 @@ public StripedReconstructionInfo(ExtendedBlock blockGroup,
6161
private StripedReconstructionInfo(ExtendedBlock blockGroup,
6262
ErasureCodingPolicy ecPolicy, byte[] liveIndices, DatanodeInfo[] sources,
6363
byte[] targetIndices, DatanodeInfo[] targets,
64-
StorageType[] targetStorageTypes, String[] targetStorageIds, byte[] excludeReconstructedIndices) {
64+
StorageType[] targetStorageTypes, String[] targetStorageIds,
65+
byte[] excludeReconstructedIndices) {
6566

6667
this.blockGroup = blockGroup;
6768
this.ecPolicy = ecPolicy;

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -471,8 +471,8 @@ public void testProcessErasureCodingTasksSubmitionShouldSucceed()
471471
targetDnInfos_1 };
472472

473473
BlockECReconstructionInfo invalidECInfo = new BlockECReconstructionInfo(
474-
new ExtendedBlock("bp-id", 123456), dataDNs, dnStorageInfo, liveIndices,new byte[0],
475-
ecPolicy);
474+
new ExtendedBlock("bp-id", 123456), dataDNs, dnStorageInfo, liveIndices,
475+
new byte[0], ecPolicy);
476476
List<BlockECReconstructionInfo> ecTasks = new ArrayList<>();
477477
ecTasks.add(invalidECInfo);
478478
dataNode.getErasureCodingWorker().processErasureCodingTasks(ecTasks);

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestReconstructStripedBlocks.java

Lines changed: 9 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -453,7 +453,7 @@ public void testReconstrutionWithBusyBlock1() throws Exception {
453453
int writeBytes = cellSize * dataBlocks;
454454
HdfsConfiguration conf=new HdfsConfiguration();
455455
initConf(conf);
456-
conf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED,false);
456+
conf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, false);
457457
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY,
458458
2000);
459459
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
@@ -501,18 +501,16 @@ public void testReconstrutionWithBusyBlock1() throws Exception {
501501
cluster.setDataNodeDead(dn.getDatanodeId());
502502

503503
//3.Whether there is excess replicas or not during the recovery?
504-
assertEquals(8,bm.countNodes(blockInfo).liveReplicas());
504+
assertEquals(8, bm.countNodes(blockInfo).liveReplicas());
505505

506506
GenericTestUtils.waitFor(
507-
() -> {
508-
System.out.println(bm.countNodes(blockInfo).liveReplicas());
509-
System.out.println(bm.countNodes(blockInfo).excessReplicas());
510-
return bm.countNodes(blockInfo).liveReplicas()==9||bm.countNodes(blockInfo).excessReplicas() >= 1||bm.countNodes(blockInfo).redundantInternalBlocks() >= 1;
511-
},
512-
10, 100000);
513-
514-
assertEquals(0,bm.countNodes(blockInfo).excessReplicas());
515-
assertEquals(9,bm.countNodes(blockInfo).liveReplicas());
507+
() -> {
508+
return bm.countNodes(blockInfo).liveReplicas()==9||bm.countNodes(blockInfo).excessReplicas() >= 1||bm.countNodes(blockInfo).redundantInternalBlocks() >= 1;
509+
},
510+
10, 100000);
511+
512+
assertEquals(0, bm.countNodes(blockInfo).excessReplicas());
513+
assertEquals(9, bm.countNodes(blockInfo).liveReplicas());
516514
}
517515

518516
}

0 commit comments