diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
index 4e9056314c64..41ce5d4d1df0 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
@@ -180,7 +180,7 @@ private int parseAndRun(String[] args) throws IOException {
       client.restore(BackupUtils.createRestoreRequest(backupRootDir, backupId, check,
         sTableArray, tTableArray, overwrite));
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Error while running restore backup", e);
       return -5;
     }
     return 0;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
index 4529cf5fe903..e5af8717a827 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
@@ -146,7 +146,6 @@ private void handleException() throws IOException {
     // Rethrow the exception so the application can handle it.
     while (!exceptionsQueue.isEmpty()) {
       Exception first = exceptionsQueue.peek();
-      first.printStackTrace();
       if (first instanceof IOException) {
         throw (IOException) first;
       }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
index a0be0bfc1bc5..4a31cff4a718 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
@@ -279,7 +279,6 @@ static void throwEnrichedException(ExecutionException e, int retries)
     throws RetriesExhaustedException, DoNotRetryIOException {
     Throwable t = e.getCause();
     assert t != null; // That's what ExecutionException is about: holding an exception
-    t.printStackTrace();
 
     if (t instanceof RetriesExhaustedException) {
       throw (RetriesExhaustedException) t;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
index 716322cff9cf..5428ed874419 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
@@ -244,27 +244,28 @@ public Filter parseSimpleFilterExpression (byte [] filterStringAsByteArray)
     throws CharacterCodingException {
 
     String filterName = Bytes.toString(getFilterName(filterStringAsByteArray));
-    ArrayList<byte []> filterArguments = getFilterArguments(filterStringAsByteArray);
+    ArrayList<byte[]> filterArguments = getFilterArguments(filterStringAsByteArray);
     if (!filterHashMap.containsKey(filterName)) {
       throw new IllegalArgumentException("Filter Name " + filterName + " not supported");
     }
+    filterName = filterHashMap.get(filterName);
+    final String methodName = "createFilterFromArguments";
     try {
-      filterName = filterHashMap.get(filterName);
       Class<?> c = Class.forName(filterName);
-      Class<?>[] argTypes = new Class [] {ArrayList.class};
-      Method m = c.getDeclaredMethod("createFilterFromArguments", argTypes);
-      return (Filter) m.invoke(null,filterArguments);
+      Class<?>[] argTypes = new Class[] { ArrayList.class };
+      Method m = c.getDeclaredMethod(methodName, argTypes);
+      return (Filter) m.invoke(null, filterArguments);
     } catch (ClassNotFoundException e) {
-      e.printStackTrace();
+      LOG.error("Could not find class {}", filterName, e);
     } catch (NoSuchMethodException e) {
-      e.printStackTrace();
+      LOG.error("Could not find method {} in {}", methodName, filterName, e);
     } catch (IllegalAccessException e) {
-      e.printStackTrace();
+      LOG.error("Unable to access specified class {}", filterName, e);
     } catch (InvocationTargetException e) {
-      e.printStackTrace();
+      LOG.error("Method {} threw an exception for {}", methodName, filterName, e);
     }
-    throw new IllegalArgumentException("Incorrect filter string " +
-        new String(filterStringAsByteArray, StandardCharsets.UTF_8));
+    throw new IllegalArgumentException(
+      "Incorrect filter string " + new String(filterStringAsByteArray, StandardCharsets.UTF_8));
   }
 
   /**
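
For context on the ParseFilter hunk above: the parser maps the filter name to a class and then reflectively calls a static factory method on it, which is why the patch threads methodName through the new log messages. A minimal sketch of that reflective contract, using a hypothetical ExampleFilter class (the class name and the main harness are illustrative only, not taken from the patch):

    import java.lang.reflect.Method;
    import java.util.ArrayList;

    // Hypothetical stand-in for an HBase filter: ParseFilter expects each
    // parseable filter to expose a static createFilterFromArguments factory
    // taking the parsed arguments as an ArrayList of byte[].
    public class ExampleFilter {
      public static ExampleFilter createFilterFromArguments(ArrayList<byte[]> args) {
        return new ExampleFilter();
      }

      public static void main(String[] args) throws Exception {
        Class<?> c = Class.forName("ExampleFilter");
        Method m = c.getDeclaredMethod("createFilterFromArguments",
          new Class<?>[] { ArrayList.class });
        // Static method: the receiver is null, the ArrayList is the only argument.
        Object filter = m.invoke(null, new ArrayList<byte[]>());
        System.out.println(filter.getClass().getSimpleName()); // ExampleFilter
      }
    }
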
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
index 1248f874dd3c..3273f4926189 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
@@ -35,6 +35,8 @@
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Sample Uploader MapReduce
@@ -60,6 +62,7 @@
  */
 @InterfaceAudience.Private
 public class SampleUploader extends Configured implements Tool {
+  private static final Logger LOG = LoggerFactory.getLogger(SampleUploader.class);
 
   private static final String NAME = "SampleUploader";
 
@@ -100,7 +103,8 @@ public void map(LongWritable key, Text line, Context context)
       try {
         context.write(new ImmutableBytesWritable(row), put);
       } catch (InterruptedException e) {
-        e.printStackTrace();
+        LOG.error("Interrupted emitting put", e);
+        Thread.currentThread().interrupt();
       }
 
       // Set status every checkpoint lines
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
index 07486bfaf0cf..4c3a99c4b366 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
@@ -48,12 +48,15 @@
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * See the instructions under hbase-examples/README.txt
  */
 @InterfaceAudience.Private
 public class DemoClient {
+  private static final Logger LOG = LoggerFactory.getLogger(DemoClient.class);
 
   static protected int port;
   static protected String host;
@@ -110,15 +113,15 @@ private String utf8(byte[] buf) {
     }
   }
 
-    // Helper to translate strings to UTF8 bytes
-    private byte[] bytes(String s) {
-      try {
-        return s.getBytes("UTF-8");
-      } catch (UnsupportedEncodingException e) {
-        e.printStackTrace();
-        return null;
-      }
+  // Helper to translate strings to UTF8 bytes
+  private byte[] bytes(String s) {
+    try {
+      return s.getBytes("UTF-8");
+    } catch (UnsupportedEncodingException e) {
+      LOG.error("CharSetName {} not supported", s, e);
+      return null;
     }
+  }
 
   private void run() throws Exception {
     TTransport transport = new TSocket(host, port);
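
A side note on the bytes() helper rewritten above: the checked UnsupportedEncodingException exists only because the charset is passed by name. Since Java 7, StandardCharsets.UTF_8 with the Charset overload of getBytes avoids the checked exception entirely, so the catch block and the null fallback disappear. (As written, the {} in the new message is filled with the input string s rather than the literal charset name "UTF-8".) A sketch of that alternative, not part of this patch:

    import java.nio.charset.StandardCharsets;

    public class Utf8Bytes {
      // getBytes(Charset) declares no checked exception, so no catch
      // block or null fallback is needed.
      static byte[] bytes(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
      }

      public static void main(String[] args) {
        System.out.println(bytes("row1").length); // 4
      }
    }
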
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
index 422d405bb03a..fd214d17535b 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
@@ -53,12 +53,15 @@
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
 import org.ietf.jgss.Oid;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * See the instructions under hbase-examples/README.txt
 */
 @InterfaceAudience.Private
 public class HttpDoAsClient {
+  private static final Logger LOG = LoggerFactory.getLogger(HttpDoAsClient.class);
 
   static protected int port;
   static protected String host;
@@ -113,7 +116,7 @@ private byte[] bytes(String s) {
     try {
       return s.getBytes("UTF-8");
     } catch (UnsupportedEncodingException e) {
-      e.printStackTrace();
+      LOG.error("CharSetName {} not supported", s, e);
       return null;
     }
   }
@@ -188,7 +191,7 @@ private Hbase.Client refresh(Hbase.Client client, THttpClient httpClient) {
       try {
         httpClient.setCustomHeader("Authorization", generateTicket());
       } catch (GSSException e) {
-        e.printStackTrace();
+        LOG.error("Kerberos authentication failed", e);
       }
     }
     return client;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
index 0427f50ffec2..0907fd49cd2f 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
@@ -40,6 +40,8 @@
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Collection;
@@ -51,6 +53,7 @@
 @InterfaceAudience.Public
 @SuppressWarnings({ "rawtypes", "unchecked" })
 public class TableMapReduceUtil {
+  private static final Logger LOG = LoggerFactory.getLogger(TableMapReduceUtil.class);
 
   /**
    * Use this before submitting a TableMap job. It will
@@ -110,14 +113,14 @@ public static void initTableMapJob(String table, String columns,
       try {
         addDependencyJars(job);
       } catch (IOException e) {
-        e.printStackTrace();
+        LOG.error("IOException encountered while adding dependency jars", e);
       }
     }
 
     try {
       initCredentials(job);
     } catch (IOException ioe) {
       // just spit out the stack trace? really?
-      ioe.printStackTrace();
+      LOG.error("IOException encountered while initializing credentials", ioe);
     }
   }
@@ -310,7 +313,7 @@ public static void initCredentials(JobConf job) throws IOException {
         User user = userProvider.getCurrent();
         TokenUtil.addTokenForJob(conn, job, user);
       } catch (InterruptedException ie) {
-        ie.printStackTrace();
+        LOG.error("Interrupted obtaining user authentication token", ie);
         Thread.currentThread().interrupt();
       } finally {
         conn.close();
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
index ff0f01ca19f1..f00a0f08e705 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
@@ -176,7 +176,8 @@ public void map(ImmutableBytesWritable row, Result values,
           context.getCounter(Counters.CELLS).increment(cellCount);
         }
       } catch (InterruptedException e) {
-        e.printStackTrace();
+        LOG.error("Interrupted while writing cellCount", e);
+        Thread.currentThread().interrupt();
       }
     }
   }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
index 2e9e62cf3733..b20f07fa7a7f 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
@@ -339,7 +339,7 @@ private boolean doCommandLine(final String[] args) {
         dstTableName = tableName;
       }
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Failed to parse commandLine arguments", e);
       printUsage("Can't start because " + e.getMessage());
       return false;
     }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
index b67225e70d61..2d742fc6d2b8 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
@@ -713,7 +713,7 @@ private boolean doCommandLine(final String[] args) {
       }
 
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Failed to parse commandLine arguments", e);
       printUsage("Can't start because " + e.getMessage());
       return false;
     }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
index f7405fde8383..7805bc5dff27 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
@@ -222,7 +222,8 @@ public void map(ImmutableBytesWritable row, Result value,
           }
         }
       } catch (InterruptedException e) {
-        e.printStackTrace();
+        LOG.error("Interrupted while emitting Cell", e);
+        Thread.currentThread().interrupt();
       }
     }
 
@@ -286,7 +287,8 @@ public void map(ImmutableBytesWritable row, Result value,
           }
         }
       } catch (InterruptedException e) {
-        e.printStackTrace();
+        LOG.error("Interrupted while emitting Cell", e);
+        Thread.currentThread().interrupt();
       }
     }
 
@@ -319,7 +321,8 @@ public void map(ImmutableBytesWritable row, Result value,
       try {
         writeResult(row, value, context);
       } catch (InterruptedException e) {
-        e.printStackTrace();
+        LOG.error("Interrupted while writing result", e);
+        Thread.currentThread().interrupt();
       }
     }
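
The InterruptedException handlers above (SampleUploader, TableMapReduceUtil, CellCounter, Import) all follow the same two-step pattern: log the exception, then re-assert the interrupt status, because catching InterruptedException clears the thread's interrupt flag. A minimal self-contained sketch of the pattern, assuming an SLF4J binding on the classpath; the emit() task is hypothetical and merely stands in for a blocking call like context.write(...):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class InterruptExample {
      private static final Logger LOG = LoggerFactory.getLogger(InterruptExample.class);

      // Stand-in for a blocking call such as context.write(...).
      static void emit() {
        try {
          Thread.sleep(1000);
        } catch (InterruptedException e) {
          LOG.error("Interrupted while emitting", e);
          // Catching InterruptedException cleared the flag; restore it so
          // callers further up the stack can still observe the interruption.
          Thread.currentThread().interrupt();
        }
      }

      public static void main(String[] args) {
        Thread.currentThread().interrupt();
        emit(); // sleep() throws immediately because the flag is already set
        System.out.println("interrupted = " + Thread.currentThread().isInterrupted()); // true
      }
    }
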
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
index 32b7561dda46..a9688702f869 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
@@ -779,7 +779,7 @@ private boolean doCommandLine(final String[] args) {
 
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Failed to parse commandLine arguments", e);
       printUsage("Can't start because " + e.getMessage());
       return false;
     }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java
index 8dc7156d099a..5d406195d40b 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java
@@ -39,14 +39,16 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.Mapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Write table content out to files in hdfs.
 */
 @InterfaceAudience.Public
 public class TsvImporterMapper
-extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put>
-{
+  extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
+  private static final Logger LOG = LoggerFactory.getLogger(TsvImporterMapper.class);
 
   /** Timestamp for all inserted rows */
   protected long ts;
@@ -199,7 +201,8 @@ public void map(LongWritable offset, Text value,
       }
       throw new IOException(badLine);
     } catch (InterruptedException e) {
-      e.printStackTrace();
+      LOG.error("Interrupted while emitting put", e);
+      Thread.currentThread().interrupt();
     }
   }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java
index f3f81ec1a717..0127f26955c3 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java
@@ -28,14 +28,16 @@
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Write table content out to map output files.
 */
 @InterfaceAudience.Public
 public class TsvImporterTextMapper
-extends Mapper<LongWritable, Text, ImmutableBytesWritable, Text>
-{
+  extends Mapper<LongWritable, Text, ImmutableBytesWritable, Text> {
+  private static final Logger LOG = LoggerFactory.getLogger(TsvImporterTextMapper.class);
 
   /** Column seperator */
   private String separator;
@@ -121,7 +123,7 @@ public void map(LongWritable offset, Text value, Context context) throws IOExcep
     }
     throw new IOException(badLine);
   } catch (InterruptedException e) {
-    e.printStackTrace();
+    LOG.error("Interrupted while emitting TSV text", e);
     Thread.currentThread().interrupt();
   }
 }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
index fe8ff9410028..aa61316ebde2 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
@@ -123,7 +123,8 @@ public void map(WALKey key, WALEdit value, Context context) throws IOException {
         }
       }
     } catch (InterruptedException e) {
-      e.printStackTrace();
+      LOG.error("Interrupted while emitting Cell", e);
+      Thread.currentThread().interrupt();
     }
   }
 
@@ -199,7 +200,8 @@ public void map(WALKey key, WALEdit value, Context context) throws IOException {
         }
       }
     } catch (InterruptedException e) {
-      e.printStackTrace();
+      LOG.error("Interrupted while writing results", e);
+      Thread.currentThread().interrupt();
     }
   }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
index 85eebc50bdc8..d1b5c607fbd3 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
@@ -644,7 +644,7 @@ public boolean doCommandLine(final String[] args) {
       }
 
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Failed to parse commandLine arguments", e);
       printUsage("Can't start because " + e.getMessage());
       return false;
     }
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
index c692365b032b..b74efe4d905a 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
@@ -32,6 +32,8 @@
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry;
@@ -51,6 +53,8 @@
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 @InterfaceStability.Evolving
 public class ProcedureWALPrettyPrinter extends Configured implements Tool {
+  private static final Logger LOG = LoggerFactory.getLogger(ProcedureWALPrettyPrinter.class);
+
   private final PrintStream out;
 
   public ProcedureWALPrettyPrinter() {
@@ -171,7 +175,7 @@ public int run(final String[] args) throws IOException {
         return(-1);
       }
     } catch (ParseException e) {
-      e.printStackTrace();
+      LOG.error("Failed to parse commandLine arguments", e);
       HelpFormatter formatter = new HelpFormatter();
       formatter.printHelp("ProcedureWALPrettyPrinter ", options, true);
       return(-1);
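
All of the LOG.error calls in this patch lean on an SLF4J convention: when the last argument is a Throwable left over after the {} placeholders are bound, it is logged as the exception, stack trace included. So nothing printStackTrace() used to print is lost; it is just routed through the logging configuration instead of going straight to stderr. A minimal sketch (the method and class names in the message are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ThrowableLastExample {
      private static final Logger LOG = LoggerFactory.getLogger(ThrowableLastExample.class);

      public static void main(String[] args) {
        Exception e = new IllegalStateException("boom");
        // The two {} consume the first two arguments; the trailing exception
        // is recognized as the Throwable and its stack trace is emitted.
        LOG.error("Could not find method {} in {}", "createFilterFromArguments",
          "org.example.SomeFilter", e);
      }
    }
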
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
index 19143932b5a1..d3e62dc39001 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
@@ -298,7 +298,8 @@ public String waitOnRegionServer(JVMClusterUtil.RegionServerThread rst) {
         LOG.info("Waiting on " + rst.getRegionServer().toString());
         rst.join();
       } catch (InterruptedException e) {
-        e.printStackTrace();
+        LOG.error("Interrupted while waiting for {} to finish. Retrying join", rst.getName(), e);
+        Thread.currentThread().interrupt();
       }
     }
     regionThreads.remove(rst);
@@ -370,7 +371,9 @@ public String waitOnMaster(JVMClusterUtil.MasterThread masterThread) {
         LOG.info("Waiting on " + masterThread.getMaster().getServerName().toString());
         masterThread.join();
       } catch (InterruptedException e) {
-        e.printStackTrace();
+        LOG.error("Interrupted while waiting for {} to finish. Retrying join",
+          masterThread.getName(), e);
+        Thread.currentThread().interrupt();
       }
     }
     masterThreads.remove(masterThread);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
index 1feeeeb6e26b..489894e8c666 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
@@ -315,7 +315,7 @@ public void run() {
         try {
           this.zkcluster.shutdown();
         } catch (IOException e) {
-          e.printStackTrace();
+          LOG.error("Failed to shutdown MiniZooKeeperCluster", e);
         }
       }
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
index 85877febec65..3fa496fea1ae 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
@@ -50,6 +50,8 @@
 import org.apache.hbase.thirdparty.org.apache.commons.cli.PosixParser;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * WALPrettyPrinter prints the contents of a given WAL with a variety of
@@ -67,6 +69,8 @@
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 @InterfaceStability.Evolving
 public class WALPrettyPrinter {
+  private static final Logger LOG = LoggerFactory.getLogger(WALPrettyPrinter.class);
+
   private boolean outputValues;
   private boolean outputJSON;
   // The following enable filtering by sequence, region, and row, respectively
@@ -400,7 +404,7 @@ public static void run(String[] args) throws IOException {
       if (cmd.hasOption("w"))
         printer.setRowFilter(cmd.getOptionValue("w"));
     } catch (ParseException e) {
-      e.printStackTrace();
+      LOG.error("Failed to parse commandLine arguments", e);
       HelpFormatter formatter = new HelpFormatter();
       formatter.printHelp("HFile filename(s) ", options, true);
       System.exit(-1);
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
index 3e755847514d..fc0032705efb 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
@@ -206,7 +206,7 @@ public void stop() {
       try {
         this.infoServer.stop();
       } catch (Exception ex) {
-        ex.printStackTrace();
+        LOG.error("Failed to stop infoServer", ex);
       }
     }
     serverRunner.shutdown();
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
index 019351587977..8e70611e8643 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
@@ -44,6 +44,8 @@
 import org.apache.zookeeper.server.ZooKeeperServerMain;
 import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
 import org.apache.zookeeper.server.quorum.QuorumPeerMain;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * HBase's version of ZooKeeper's QuorumPeer. When HBase is set to manage
@@ -55,6 +57,8 @@
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 @InterfaceStability.Evolving
 public final class HQuorumPeer {
+  private static final Logger LOG = LoggerFactory.getLogger(HQuorumPeer.class);
+
   private HQuorumPeer() {
   }
 
@@ -77,7 +81,7 @@ public static void main(String[] args) {
       runZKServer(zkConfig);
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Failed to start ZKServer", e);
       System.exit(-1);
     }
   }
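
Finally, the classes that previously had no logger at all (SampleUploader, DemoClient, HttpDoAsClient, TableMapReduceUtil, the two TSV mappers, ProcedureWALPrettyPrinter, WALPrettyPrinter, HQuorumPeer) each gain the standard per-class SLF4J declaration. A sketch of the complete before/after shape on a hypothetical class:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class QuorumPeerExample {
      // One static final logger per class, named after the class so log
      // output and log-level configuration map cleanly back to the source.
      private static final Logger LOG = LoggerFactory.getLogger(QuorumPeerExample.class);

      public static void main(String[] args) {
        try {
          throw new IllegalArgumentException("bad quorum config");
        } catch (Exception e) {
          LOG.error("Failed to start ZKServer", e); // was: e.printStackTrace()
        }
      }
    }
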