Commit e68c61d

HBASE-26928 Fix several indentation problems (#4323)
Signed-off-by: Xiaolin Ha <haxiaolin@apache.org>
1 parent ae3718b commit e68c61d

4 files changed: +67 / -53 lines changed


hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java

Lines changed: 9 additions & 8 deletions
@@ -497,16 +497,17 @@ public static void decrypt(OutputStream out, InputStream in, int outLen,
    * @return a key for the given subject
    * @throws IOException if the key is not found
    */
-  public static Key getSecretKeyForSubject(String subject, Configuration conf)
-      throws IOException {
+  public static Key getSecretKeyForSubject(String subject, Configuration conf) throws IOException {
     KeyProvider provider = getKeyProvider(conf);
-    if (provider != null) try {
-      Key[] keys = provider.getKeys(new String[] { subject });
-      if (keys != null && keys.length > 0) {
-        return keys[0];
+    if (provider != null) {
+      try {
+        Key[] keys = provider.getKeys(new String[] { subject });
+        if (keys != null && keys.length > 0) {
+          return keys[0];
+        }
+      } catch (Exception e) {
+        throw new IOException(e);
       }
-    } catch (Exception e) {
-      throw new IOException(e);
     }
     throw new IOException("No key found for subject '" + subject + "'");
   }
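
The hunk above is purely structural: the one-line `if (provider != null) try {` form is expanded so that the `if` and the `try` each get their own braced block, and the method signature is joined onto a single line. A minimal, self-contained sketch of the resulting shape, using a plain Map lookup as a hypothetical stand-in for HBase's KeyProvider:

import java.io.IOException;
import java.util.Map;

public final class KeyLookupSketch {
  /** Mirrors the braced if/try structure from the hunk above, with a Map instead of a KeyProvider. */
  static String getForSubject(String subject, Map<String, String> provider) throws IOException {
    if (provider != null) {
      try {
        String value = provider.get(subject);
        if (value != null) {
          return value;
        }
      } catch (RuntimeException e) {
        throw new IOException(e);
      }
    }
    throw new IOException("No key found for subject '" + subject + "'");
  }
}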

hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpointTracing.java

Lines changed: 4 additions & 8 deletions
@@ -290,14 +290,10 @@ public void traceSyncTableEndpointCallAndCallback() throws Exception {
     final ConcurrentMap<byte[], EchoResponseProto> results = new ConcurrentHashMap<>();
     TraceUtil.trace(() -> {
       try {
-        table.coprocessorService(TestProtobufRpcProto.class, null, null,
-          t -> {
-            t.echo(controller, request, callback);
-            return callback.get();
-          },
-          (region, row, result) -> {
-            results.put(region, result);
-          });
+        table.coprocessorService(TestProtobufRpcProto.class, null, null, t -> {
+          t.echo(controller, request, callback);
+          return callback.get();
+        }, (region, row, result) -> results.put(region, result));
       } catch (Throwable t) {
         throw new RuntimeException(t);
       }
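
This hunk only reflows the two lambda arguments passed to coprocessorService; behavior is unchanged. As a generic illustration of the same "call plus per-result callback" shape, here is a small runnable sketch with hypothetical functional interfaces, not the HBase coprocessor API:

import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiConsumer;
import java.util.function.Function;

final class CallbackShapeSketch {
  /** Applies the call to each key and feeds every result to the callback, mirroring the batched endpoint call above. */
  static <K, R> void callAll(Iterable<K> keys, Function<K, R> call, BiConsumer<K, R> callback) {
    for (K key : keys) {
      callback.accept(key, call.apply(key));
    }
  }

  public static void main(String[] args) {
    Map<String, Integer> results = new ConcurrentHashMap<>();
    callAll(List.of("a", "bb"), String::length, results::put);
    System.out.println(results); // prints {a=1, bb=2}
  }
}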

hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java

Lines changed: 51 additions & 34 deletions
@@ -223,10 +223,12 @@ Response update(final CellSetModel model, final boolean replace) {
       int i = 0;
       for (CellModel cell: row.getCells()) {
         byte[] col = cell.getColumn();
-        if (col == null) try {
-          col = rowspec.getColumns()[i++];
-        } catch (ArrayIndexOutOfBoundsException e) {
-          col = null;
+        if (col == null) {
+          try {
+            col = rowspec.getColumns()[i++];
+          } catch (ArrayIndexOutOfBoundsException e) {
+            col = null;
+          }
         }
         if (col == null) {
           servlet.getMetrics().incrementFailedPutRequests(1);
@@ -263,10 +265,12 @@ Response update(final CellSetModel model, final boolean replace) {
       servlet.getMetrics().incrementFailedPutRequests(1);
       return processException(e);
     } finally {
-      if (table != null) try {
-        table.close();
-      } catch (IOException ioe) {
-        LOG.debug("Exception received while closing the table", ioe);
+      if (table != null) {
+        try {
+          table.close();
+        } catch (IOException ioe) {
+          LOG.debug("Exception received while closing the table", ioe);
+        }
       }
     }
   }
@@ -334,10 +338,12 @@ Response updateBinary(final byte[] message, final HttpHeaders headers,
       servlet.getMetrics().incrementFailedPutRequests(1);
       return processException(e);
     } finally {
-      if (table != null) try {
-        table.close();
-      } catch (IOException ioe) {
-        LOG.debug("Exception received while closing the table", ioe);
+      if (table != null) {
+        try {
+          table.close();
+        } catch (IOException ioe) {
+          LOG.debug("Exception received while closing the table", ioe);
+        }
       }
     }
   }
@@ -399,10 +405,11 @@ public Response delete(final @Context UriInfo uriInfo) {
         .build();
     }
     Delete delete = null;
-    if (rowspec.hasTimestamp())
+    if (rowspec.hasTimestamp()) {
       delete = new Delete(rowspec.getRow(), rowspec.getTimestamp());
-    else
+    } else {
       delete = new Delete(rowspec.getRow());
+    }

    for (byte[] column: rowspec.getColumns()) {
      byte[][] split = CellUtil.parseColumn(column);
@@ -440,10 +447,12 @@ public Response delete(final @Context UriInfo uriInfo) {
      servlet.getMetrics().incrementFailedDeleteRequests(1);
      return processException(e);
    } finally {
-      if (table != null) try {
-        table.close();
-      } catch (IOException ioe) {
-        LOG.debug("Exception received while closing the table", ioe);
+      if (table != null) {
+        try {
+          table.close();
+        } catch (IOException ioe) {
+          LOG.debug("Exception received while closing the table", ioe);
+        }
      }
    }
    return Response.ok().build();
@@ -557,10 +566,12 @@ Response checkAndPut(final CellSetModel model) {
      servlet.getMetrics().incrementFailedPutRequests(1);
      return processException(e);
    } finally {
-      if (table != null) try {
-        table.close();
-      } catch (IOException ioe) {
-        LOG.debug("Exception received while closing the table", ioe);
+      if (table != null) {
+        try {
+          table.close();
+        } catch (IOException ioe) {
+          LOG.debug("Exception received while closing the table", ioe);
+        }
      }
    }
  }
@@ -687,10 +698,12 @@ Response checkAndDelete(final CellSetModel model) {
      servlet.getMetrics().incrementFailedDeleteRequests(1);
      return processException(e);
    } finally {
-      if (table != null) try {
-        table.close();
-      } catch (IOException ioe) {
-        LOG.debug("Exception received while closing the table", ioe);
+      if (table != null) {
+        try {
+          table.close();
+        } catch (IOException ioe) {
+          LOG.debug("Exception received while closing the table", ioe);
+        }
      }
    }
  }
@@ -781,10 +794,12 @@ Response append(final CellSetModel model) {
      servlet.getMetrics().incrementFailedAppendRequests(1);
      return processException(e);
    } finally {
-      if (table != null) try {
-        table.close();
-      } catch (IOException ioe) {
-        LOG.debug("Exception received while closing the table" + table.getName(), ioe);
+      if (table != null) {
+        try {
+          table.close();
+        } catch (IOException ioe) {
+          LOG.debug("Exception received while closing the table" + table.getName(), ioe);
+        }
      }
    }
  }
@@ -878,10 +893,12 @@ Response increment(final CellSetModel model) {
      servlet.getMetrics().incrementFailedIncrementRequests(1);
      return processException(e);
    } finally {
-      if (table != null) try {
-        table.close();
-      } catch (IOException ioe) {
-        LOG.debug("Exception received while closing the table " + table.getName(), ioe);
+      if (table != null) {
+        try {
+          table.close();
+        } catch (IOException ioe) {
+          LOG.debug("Exception received while closing the table " + table.getName(), ioe);
+        }
      }
    }
  }
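
All of the RowResource.java hunks normalize the same `finally` idiom: if the table was opened, close it, and log rather than rethrow any IOException raised by the close. Below is a self-contained sketch of that idiom against a plain java.io.Closeable; the System.err call is a stand-in for the class's LOG.debug(...), and the helper name is hypothetical:

import java.io.Closeable;
import java.io.IOException;

final class CloseQuietlySketch {
  /** Runs the body, then closes the resource, logging (not rethrowing) failures from close(). */
  static void runAndClose(Runnable body, Closeable table) {
    try {
      body.run();
    } finally {
      if (table != null) {
        try {
          table.close();
        } catch (IOException ioe) {
          // RowResource routes this through LOG.debug("Exception received while closing the table", ioe);
          System.err.println("Exception received while closing the table: " + ioe);
        }
      }
    }
  }
}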

hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRefreshRecoveredReplication.java

Lines changed: 3 additions & 3 deletions
@@ -141,9 +141,9 @@ public void testReplicationRefreshSource() throws Exception {
     Replication replication = (Replication)otherServer.getReplicationSourceService();
     UTIL1.waitFor(60000, () -> !replication.getReplicationManager().getOldSources().isEmpty());
     // Wait on only one server being up.
-    UTIL1.waitFor(60000, () ->
-      // Have to go back to source here because getLiveRegionServerThreads makes new array each time
-      UTIL1.getMiniHBaseCluster().getLiveRegionServerThreads().size() == NUM_SLAVES1 - 1);
+    // Have to go back to source here because getLiveRegionServerThreads makes new array each time
+    UTIL1.waitFor(60000,
+      () -> UTIL1.getMiniHBaseCluster().getLiveRegionServerThreads().size() == NUM_SLAVES1 - 1);
     UTIL1.waitTableAvailable(tablename);
     LOG.info("Available {}", tablename);
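
For context, `UTIL1.waitFor(timeout, condition)` in the test above is a polling wait on a boolean condition. The sketch below is a simplified, hypothetical stand-in for that kind of helper, not the HBase test utility implementation:

import java.util.function.BooleanSupplier;

final class WaitForSketch {
  /** Polls the condition every 100 ms until it holds or timeoutMs elapses; returns the final result. */
  static boolean waitFor(long timeoutMs, BooleanSupplier condition) throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMs;
    while (System.currentTimeMillis() < deadline) {
      if (condition.getAsBoolean()) {
        return true;
      }
      Thread.sleep(100);
    }
    return condition.getAsBoolean();
  }
}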
