From 45cb862763fb0aee1ac865b5bb747a9c7facb2e8 Mon Sep 17 00:00:00 2001
From: "subrat.mishra"
Date: Tue, 14 Aug 2018 15:47:56 +0530
Subject: [PATCH] HBASE-21040: Cleanup printStackTrace to proper Logger call.

---
 .../apache/hadoop/hbase/backup/RestoreDriver.java  |  2 +-
 .../hbase/client/ClientAsyncPrefetchScanner.java   |  5 ++++-
 .../client/RpcRetryingCallerWithReadReplicas.java  |  2 +-
 .../apache/hadoop/hbase/filter/ParseFilter.java    | 23 +++++++++++-----------
 .../hadoop/hbase/mapreduce/SampleUploader.java     |  5 ++++-
 .../org/apache/hadoop/hbase/thrift/DemoClient.java |  5 ++++-
 .../apache/hadoop/hbase/thrift/HttpDoAsClient.java |  7 +++++--
 .../hadoop/hbase/mapred/TableMapReduceUtil.java    |  9 ++++++---
 .../apache/hadoop/hbase/mapreduce/CellCounter.java |  2 +-
 .../apache/hadoop/hbase/mapreduce/CopyTable.java   |  2 +-
 .../apache/hadoop/hbase/mapreduce/HashTable.java   |  2 +-
 .../org/apache/hadoop/hbase/mapreduce/Import.java  |  6 +++---
 .../apache/hadoop/hbase/mapreduce/SyncTable.java   |  2 +-
 .../hadoop/hbase/mapreduce/TsvImporterMapper.java  |  8 +++++---
 .../hbase/mapreduce/TsvImporterTextMapper.java     |  8 +++++---
 .../apache/hadoop/hbase/mapreduce/WALPlayer.java   |  4 ++--
 .../mapreduce/replication/VerifyReplication.java   |  2 +-
 .../store/wal/ProcedureWALPrettyPrinter.java       |  6 +++++-
 .../org/apache/hadoop/hbase/LocalHBaseCluster.java |  4 ++--
 .../hadoop/hbase/master/HMasterCommandLine.java    |  2 +-
 .../apache/hadoop/hbase/wal/WALPrettyPrinter.java  |  6 +++++-
 .../apache/hadoop/hbase/thrift/ThriftServer.java   |  2 +-
 .../apache/hadoop/hbase/zookeeper/HQuorumPeer.java |  6 +++++-
 23 files changed, 76 insertions(+), 44 deletions(-)

diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
index 4e9056314c..41ce5d4d1d 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
@@ -180,7 +180,7 @@ public class RestoreDriver extends AbstractHBaseTool {
       client.restore(BackupUtils.createRestoreRequest(backupRootDir, backupId, check,
         sTableArray, tTableArray, overwrite));
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Error while running restore backup", e);
       return -5;
     }
     return 0;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
index 4529cf5fe9..a5fcad2a37 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
@@ -39,6 +39,8 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.util.Threads;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * ClientAsyncPrefetchScanner implements async scanner behaviour.
@@ -52,6 +54,7 @@ import org.apache.hadoop.hbase.util.Threads;
  */
 @InterfaceAudience.Private
 public class ClientAsyncPrefetchScanner extends ClientSimpleScanner {
+  private static final Logger LOG = LoggerFactory.getLogger(ClientAsyncPrefetchScanner.class);
 
   private long maxCacheSize;
   private AtomicLong cacheSizeInBytes;
@@ -146,7 +149,7 @@ public class ClientAsyncPrefetchScanner extends ClientSimpleScanner {
       // Rethrow the exception so the application can handle it.
       while (!exceptionsQueue.isEmpty()) {
         Exception first = exceptionsQueue.peek();
-        first.printStackTrace();
+        LOG.error("Exception occurred in prefetch task", first);
         if (first instanceof IOException) {
           throw (IOException) first;
         }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
index a0be0bfc1b..ee6e3570fd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
@@ -279,7 +279,7 @@ public class RpcRetryingCallerWithReadReplicas {
       throws RetriesExhaustedException, DoNotRetryIOException {
     Throwable t = e.getCause();
     assert t != null; // That's what ExecutionException is about: holding an exception
-    t.printStackTrace();
+    LOG.error("ExecutionException encountered", t);
 
     if (t instanceof RetriesExhaustedException) {
       throw (RetriesExhaustedException) t;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
index 716322cff9..5428ed8744 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
@@ -244,27 +244,28 @@ public class ParseFilter {
     throws CharacterCodingException {
 
     String filterName = Bytes.toString(getFilterName(filterStringAsByteArray));
-    ArrayList<byte []> filterArguments = getFilterArguments(filterStringAsByteArray);
+    ArrayList<byte[]> filterArguments = getFilterArguments(filterStringAsByteArray);
     if (!filterHashMap.containsKey(filterName)) {
       throw new IllegalArgumentException("Filter Name " + filterName + " not supported");
     }
+    filterName = filterHashMap.get(filterName);
+    final String methodName = "createFilterFromArguments";
     try {
-      filterName = filterHashMap.get(filterName);
       Class<?> c = Class.forName(filterName);
-      Class<?>[] argTypes = new Class [] {ArrayList.class};
-      Method m = c.getDeclaredMethod("createFilterFromArguments", argTypes);
-      return (Filter) m.invoke(null,filterArguments);
+      Class<?>[] argTypes = new Class[] { ArrayList.class };
+      Method m = c.getDeclaredMethod(methodName, argTypes);
+      return (Filter) m.invoke(null, filterArguments);
     } catch (ClassNotFoundException e) {
-      e.printStackTrace();
+      LOG.error("Could not find class {}", filterName, e);
     } catch (NoSuchMethodException e) {
-      e.printStackTrace();
+      LOG.error("Could not find method {} in {}", methodName, filterName, e);
     } catch (IllegalAccessException e) {
-      e.printStackTrace();
+      LOG.error("Unable to access specified class {}", filterName, e);
     } catch (InvocationTargetException e) {
-      e.printStackTrace();
+      LOG.error("Method {} threw an exception for {}", methodName, filterName, e);
     }
-    throw new IllegalArgumentException("Incorrect filter string " +
-        new String(filterStringAsByteArray, StandardCharsets.UTF_8));
+    throw new IllegalArgumentException(
+        "Incorrect filter string " + new String(filterStringAsByteArray, StandardCharsets.UTF_8));
   }
 
   /**
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
index 1248f874dd..183347a774 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
@@ -35,6 +35,8 @@ import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Sample Uploader MapReduce
@@ -60,6 +62,7 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Private
 public class SampleUploader extends Configured implements Tool {
+  private static final Logger LOG = LoggerFactory.getLogger(SampleUploader.class);
 
   private static final String NAME = "SampleUploader";
 
@@ -100,7 +103,7 @@ public class SampleUploader extends Configured implements Tool {
       try {
         context.write(new ImmutableBytesWritable(row), put);
       } catch (InterruptedException e) {
-        e.printStackTrace();
+        LOG.error("Interrupted emitting put", e);
       }
 
       // Set status every checkpoint lines
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
index 07486bfaf0..87f82d86f4 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
@@ -48,12 +48,15 @@ import org.apache.thrift.transport.TSaslClientTransport;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * See the instructions under hbase-examples/README.txt
  */
 @InterfaceAudience.Private
 public class DemoClient {
+    private static final Logger LOG = LoggerFactory.getLogger(DemoClient.class);
 
     static protected int port;
     static protected String host;
@@ -115,7 +118,7 @@ public class DemoClient {
         try {
             return s.getBytes("UTF-8");
         } catch (UnsupportedEncodingException e) {
-            e.printStackTrace();
+            LOG.error("CharSetName UTF-8 not supported", e);
             return null;
         }
     }
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
index 422d405bb0..fd214d1753 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
@@ -53,12 +53,15 @@ import org.ietf.jgss.GSSException;
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
 import org.ietf.jgss.Oid;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * See the instructions under hbase-examples/README.txt
  */
 @InterfaceAudience.Private
 public class HttpDoAsClient {
+  private static final Logger LOG = LoggerFactory.getLogger(HttpDoAsClient.class);
 
   static protected int port;
   static protected String host;
@@ -113,7 +116,7 @@ public class HttpDoAsClient {
     try {
       return s.getBytes("UTF-8");
     } catch (UnsupportedEncodingException e) {
-      e.printStackTrace();
+      LOG.error("CharSetName UTF-8 not supported", e);
       return null;
     }
   }
@@ -188,7 +191,7 @@ public class HttpDoAsClient {
     try {
       httpClient.setCustomHeader("Authorization", generateTicket());
     } catch (GSSException e) {
-      e.printStackTrace();
+      LOG.error("Kerberos authentication failed", e);
     }
     }
     return client;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
index 0427f50ffe..0907fd49cd 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
@@ -40,6 +40,8 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Collection;
@@ -51,6 +53,7 @@ import java.util.Map;
 @InterfaceAudience.Public
 @SuppressWarnings({ "rawtypes", "unchecked" })
 public class TableMapReduceUtil {
+  private static final Logger LOG = LoggerFactory.getLogger(TableMapReduceUtil.class);
 
   /**
    * Use this before submitting a TableMap job. It will
@@ -110,14 +113,14 @@ public class TableMapReduceUtil {
       try {
         addDependencyJars(job);
       } catch (IOException e) {
-        e.printStackTrace();
+        LOG.error("IOException encountered while adding dependency jars", e);
       }
     }
     try {
       initCredentials(job);
     } catch (IOException ioe) {
       // just spit out the stack trace? really?
-      ioe.printStackTrace();
+      LOG.error("IOException encountered while initializing credentials", ioe);
     }
   }
 
@@ -310,7 +313,7 @@ public class TableMapReduceUtil {
       User user = userProvider.getCurrent();
       TokenUtil.addTokenForJob(conn, job, user);
     } catch (InterruptedException ie) {
-      ie.printStackTrace();
+      LOG.error("Interrupted obtaining user authentication token", ie);
       Thread.currentThread().interrupt();
     } finally {
       conn.close();
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
index ff0f01ca19..0a5ff60e81 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
@@ -176,7 +176,7 @@ public class CellCounter extends Configured implements Tool {
           context.getCounter(Counters.CELLS).increment(cellCount);
         }
       } catch (InterruptedException e) {
-        e.printStackTrace();
+        LOG.error("Interrupted while writing cellCount", e);
       }
     }
   }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
index 2e9e62cf37..b20f07fa7a 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
@@ -339,7 +339,7 @@ public class CopyTable extends Configured implements Tool {
         dstTableName = tableName;
       }
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Failed to parse commandLine arguments", e);
       printUsage("Can't start because " + e.getMessage());
       return false;
     }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
index b67225e70d..2d742fc6d2 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
@@ -713,7 +713,7 @@ public class HashTable extends Configured implements Tool {
       }
 
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Failed to parse commandLine arguments", e);
       printUsage("Can't start because " + e.getMessage());
because " + e.getMessage()); return false; } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java index f7405fde83..bcac927cc4 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java @@ -222,7 +222,7 @@ public class Import extends Configured implements Tool { } } } catch (InterruptedException e) { - e.printStackTrace(); + LOG.error("Interrupted while emitting Cell", e); } } @@ -286,7 +286,7 @@ public class Import extends Configured implements Tool { } } } catch (InterruptedException e) { - e.printStackTrace(); + LOG.error("Interrupted while emitting Cell", e); } } @@ -319,7 +319,7 @@ public class Import extends Configured implements Tool { try { writeResult(row, value, context); } catch (InterruptedException e) { - e.printStackTrace(); + LOG.error("Interrupted while writing result", e); } } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java index 32b7561dda..a9688702f8 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java @@ -779,7 +779,7 @@ public class SyncTable extends Configured implements Tool { } catch (Exception e) { - e.printStackTrace(); + LOG.error("Failed to parse commandLine arguments", e); printUsage("Can't start because " + e.getMessage()); return false; } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java index 8dc7156d09..66c636465e 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java @@ -39,14 +39,16 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.Mapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Write table content out to files in hdfs. */ @InterfaceAudience.Public public class TsvImporterMapper -extends Mapper -{ + extends Mapper { + private static final Logger LOG = LoggerFactory.getLogger(TsvImporterMapper.class); /** Timestamp for all inserted rows */ protected long ts; @@ -199,7 +201,7 @@ extends Mapper } throw new IOException(badLine); } catch (InterruptedException e) { - e.printStackTrace(); + LOG.error("Interrupted while emitting put", e); } } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java index f3f81ec1a7..0127f26955 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java @@ -28,14 +28,16 @@ import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Counter; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Write table content out to map output files. 
  */
 @InterfaceAudience.Public
 public class TsvImporterTextMapper
-extends Mapper<LongWritable, Text, ImmutableBytesWritable, Text>
-{
+    extends Mapper<LongWritable, Text, ImmutableBytesWritable, Text> {
+  private static final Logger LOG = LoggerFactory.getLogger(TsvImporterTextMapper.class);
 
   /** Column seperator */
   private String separator;
@@ -121,7 +123,7 @@ public class TsvImporterTextMapper
       }
       throw new IOException(badLine);
     } catch (InterruptedException e) {
-      e.printStackTrace();
+      LOG.error("Interrupted while emitting TSV text", e);
       Thread.currentThread().interrupt();
     }
   }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
index fe8ff94100..4c7df0b160 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
@@ -123,7 +123,7 @@ public class WALPlayer extends Configured implements Tool {
         }
       }
     } catch (InterruptedException e) {
-      e.printStackTrace();
+      LOG.error("Interrupted while emitting Cell", e);
     }
   }
 
@@ -199,7 +199,7 @@ public class WALPlayer extends Configured implements Tool {
         }
       }
     } catch (InterruptedException e) {
-      e.printStackTrace();
+      LOG.error("Interrupted while writing results", e);
     }
   }
 
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
index 85eebc50bd..d1b5c607fb 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
@@ -644,7 +644,7 @@ public class VerifyReplication extends Configured implements Tool {
       }
 
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Failed to parse commandLine arguments", e);
       printUsage("Can't start because " + e.getMessage());
       return false;
     }
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
index c692365b03..76a3ef2dc0 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
@@ -44,6 +44,8 @@ import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.PosixParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * ProcedureWALPrettyPrinter prints the contents of a given ProcedureWAL file
@@ -51,6 +53,8 @@ import org.apache.hbase.thirdparty.org.apache.commons.cli.PosixParser;
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 @InterfaceStability.Evolving
 public class ProcedureWALPrettyPrinter extends Configured implements Tool {
+  private static final Logger LOG = LoggerFactory.getLogger(ProcedureWALPrettyPrinter.class);
+
   private final PrintStream out;
 
   public ProcedureWALPrettyPrinter() {
@@ -171,7 +175,7 @@ public class ProcedureWALPrettyPrinter extends Configured implements Tool {
         return(-1);
       }
     } catch (ParseException e) {
-      e.printStackTrace();
+      LOG.error("Failed to parse commandLine arguments", e);
       HelpFormatter formatter = new HelpFormatter();
       formatter.printHelp("ProcedureWALPrettyPrinter ", options, true);
       return(-1);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
index 19143932b5..ef84062e32 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
@@ -298,7 +298,7 @@ public class LocalHBaseCluster {
         LOG.info("Waiting on " + rst.getRegionServer().toString());
         rst.join();
       } catch (InterruptedException e) {
-        e.printStackTrace();
+        LOG.error("Interrupted while waiting for {} to finish. Retrying join", rst.getName(), e);
       }
     }
     regionThreads.remove(rst);
@@ -370,7 +370,7 @@ public class LocalHBaseCluster {
       LOG.info("Waiting on " + masterThread.getMaster().getServerName().toString());
       masterThread.join();
     } catch (InterruptedException e) {
-      e.printStackTrace();
+      LOG.error("Interrupted while waiting for {} to finish. Retrying join", masterThread.getName(), e);
     }
   }
   masterThreads.remove(masterThread);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
index 1feeeeb6e2..489894e8c6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
@@ -315,7 +315,7 @@ public class HMasterCommandLine extends ServerCommandLine {
         try {
           this.zkcluster.shutdown();
         } catch (IOException e) {
-          e.printStackTrace();
+          LOG.error("Failed to shutdown MiniZooKeeperCluster", e);
         }
       }
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
index 85877febec..3fa496fea1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
@@ -50,6 +50,8 @@ import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.PosixParser;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * WALPrettyPrinter prints the contents of a given WAL with a variety of
@@ -67,6 +69,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 @InterfaceStability.Evolving
 public class WALPrettyPrinter {
+  private static final Logger LOG = LoggerFactory.getLogger(WALPrettyPrinter.class);
+
   private boolean outputValues;
   private boolean outputJSON;
   // The following enable filtering by sequence, region, and row, respectively
@@ -400,7 +404,7 @@ public class WALPrettyPrinter {
       if (cmd.hasOption("w")) printer.setRowFilter(cmd.getOptionValue("w"));
     } catch (ParseException e) {
-      e.printStackTrace();
+      LOG.error("Failed to parse commandLine arguments", e);
       HelpFormatter formatter = new HelpFormatter();
       formatter.printHelp("HFile filename(s) ", options, true);
       System.exit(-1);
 
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
index 3e75584751..fc0032705e 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
@@ -206,7 +206,7 @@ public class ThriftServer {
       try {
         this.infoServer.stop();
       } catch (Exception ex) {
-        ex.printStackTrace();
+        LOG.error("Failed to stop infoServer", ex);
       }
     }
     serverRunner.shutdown();
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
index 0193515879..8e70611e86 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
@@ -44,6 +44,8 @@ import org.apache.zookeeper.server.ServerConfig;
 import org.apache.zookeeper.server.ZooKeeperServerMain;
 import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
 import org.apache.zookeeper.server.quorum.QuorumPeerMain;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * HBase's version of ZooKeeper's QuorumPeer. When HBase is set to manage
@@ -55,6 +57,8 @@ import org.apache.zookeeper.server.quorum.QuorumPeerMain;
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 @InterfaceStability.Evolving
 public final class HQuorumPeer {
+  private static final Logger LOG = LoggerFactory.getLogger(HQuorumPeer.class);
+
   private HQuorumPeer() {
   }
 
@@ -77,7 +81,7 @@ public final class HQuorumPeer {
       runZKServer(zkConfig);
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.error("Failed to start ZKServer", e);
       System.exit(-1);
     }
   }
-- 
2.15.2 (Apple Git-101.1)
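
A note for reviewers on the idiom this patch standardizes on: with SLF4J, passing the exception as the last argument to LOG.error(...) (after any {} placeholder arguments) logs the full stack trace, so nothing that printStackTrace() wrote to stderr is lost; it simply flows through the logging configuration instead. Below is a minimal, self-contained sketch of the pattern; the class name, message, and "someResource" argument are illustrative only and not part of the patch.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingExample {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingExample.class);

  public static void main(String[] args) {
    try {
      throw new IllegalStateException("boom");
    } catch (IllegalStateException e) {
      // Anti-pattern removed by this patch: writes directly to stderr,
      // bypassing log levels, appenders, and log aggregation.
      // e.printStackTrace();

      // SLF4J idiom used throughout the patch: each {} is filled by the
      // matching argument, and the trailing Throwable is rendered with
      // its full stack trace.
      LOG.error("Could not process {}", "someResource", e);
    }
  }
}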