From 97c89abf4476e0efc944018b28bdd341f779a837 Mon Sep 17 00:00:00 2001 From: Balazs Meszaros Date: Thu, 10 May 2018 15:38:01 +0200 Subject: [PATCH] HBASE-20563 Use parameterized logging in hbase-common --- .../org/apache/hadoop/hbase/AuthUtil.java | 6 +- .../hadoop/hbase/CellComparatorImpl.java | 4 - .../org/apache/hadoop/hbase/ChoreService.java | 12 ++- .../hadoop/hbase/HBaseConfiguration.java | 22 +++-- .../org/apache/hadoop/hbase/KeyValue.java | 12 ++- .../apache/hadoop/hbase/ScheduledChore.java | 6 +- .../hadoop/hbase/codec/BaseDecoder.java | 6 +- .../hadoop/hbase/io/ByteBufferPool.java | 18 ++-- .../hadoop/hbase/io/compress/Compression.java | 10 +-- .../hadoop/hbase/io/crypto/Encryption.java | 12 ++- .../hbase/io/encoding/RowIndexEncoderV1.java | 5 +- .../hbase/trace/HBaseHTraceConfiguration.java | 11 ++- .../hadoop/hbase/trace/SpanReceiverHost.java | 4 +- .../org/apache/hadoop/hbase/util/Base64.java | 14 ++-- .../hadoop/hbase/util/ByteBufferArray.java | 8 +- .../apache/hadoop/hbase/util/ClassSize.java | 14 +--- .../hadoop/hbase/util/CommonFSUtils.java | 83 ++++++++----------- .../hbase/util/CoprocessorClassLoader.java | 33 +++----- .../hadoop/hbase/util/DynamicClassLoader.java | 33 +++----- .../apache/hadoop/hbase/util/JSONBean.java | 42 ++++------ .../hadoop/hbase/util/JSONMetricUtil.java | 7 +- .../org/apache/hadoop/hbase/util/MD5Hash.java | 8 +- .../org/apache/hadoop/hbase/util/Methods.java | 11 +-- .../hadoop/hbase/util/ReflectionUtils.java | 4 +- .../org/apache/hadoop/hbase/util/Sleeper.java | 11 ++- .../org/apache/hadoop/hbase/util/Threads.java | 5 +- .../apache/hadoop/hbase/util/VersionInfo.java | 6 +- .../org/apache/hadoop/hbase/ClassFinder.java | 18 ++-- .../hbase/HBaseCommonTestingUtility.java | 7 +- .../apache/hadoop/hbase/ResourceChecker.java | 18 ++-- .../apache/hadoop/hbase/TestChoreService.java | 2 +- .../apache/hadoop/hbase/TestClassFinder.java | 17 ++-- .../hadoop/hbase/TestHBaseConfiguration.java | 13 ++- .../org/apache/hadoop/hbase/TestKeyValue.java | 4 +- .../java/org/apache/hadoop/hbase/Waiter.java | 18 ++-- .../hbase/io/crypto/TestEncryption.java | 2 +- .../io/crypto/TestKeyStoreKeyProvider.java | 5 +- .../hbase/util/ClassLoaderTestHelper.java | 4 +- .../hbase/util/LoadTestKVGenerator.java | 4 +- .../hbase/util/TestDynamicClassLoader.java | 4 +- .../hadoop/hbase/util/TestShowProperties.java | 2 +- .../apache/hadoop/hbase/util/TestThreads.java | 7 +- 42 files changed, 229 insertions(+), 303 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java index 5880b8c33b..eb55ced654 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java @@ -95,10 +95,10 @@ public class AuthUtil { conf.get("hbase.client.dns.nameserver", "default"))); userProvider.login("hbase.client.keytab.file", "hbase.client.kerberos.principal", host); } catch (UnknownHostException e) { - LOG.error("Error resolving host name: " + e.getMessage(), e); + LOG.error("Error resolving host name", e); throw e; } catch (IOException e) { - LOG.error("Error while trying to perform the initial login: " + e.getMessage(), e); + LOG.error("Error while trying to perform the initial login", e); throw e; } @@ -129,7 +129,7 @@ public class AuthUtil { try { ugi.checkTGTAndReloginFromKeytab(); } catch (IOException e) { - LOG.error("Got exception while trying to refresh credentials: " + e.getMessage(), e); + LOG.error("Got exception 
while trying to refresh credentials", e); } }; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java index b1af716614..038e61ac9f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java @@ -23,9 +23,6 @@ import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import org.apache.hbase.thirdparty.com.google.common.primitives.Longs; /** @@ -44,7 +41,6 @@ import org.apache.hbase.thirdparty.com.google.common.primitives.Longs; @InterfaceAudience.Private @InterfaceStability.Evolving public class CellComparatorImpl implements CellComparator { - static final Logger LOG = LoggerFactory.getLogger(CellComparatorImpl.class); /** * Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion * of KeyValue only. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java index 85d6131b50..47d6269bd9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java @@ -157,9 +157,7 @@ public class ChoreService implements ChoreServicer { scheduledChores.put(chore, future); return true; } catch (Exception exception) { - if (LOG.isInfoEnabled()) { - LOG.info("Could not successfully schedule chore: " + chore.getName()); - } + LOG.info("Could not successfully schedule chore: {}", chore.getName()); return false; } } @@ -324,8 +322,8 @@ public class ChoreService implements ChoreServicer { public synchronized void shutdown() { scheduler.shutdownNow(); if (LOG.isInfoEnabled()) { - LOG.info("Chore service for: " + coreThreadPoolPrefix + " had " + scheduledChores.keySet() - + " on shutdown"); + LOG.info("Chore service for: {} had {} on shutdown", + coreThreadPoolPrefix, scheduledChores.keySet()); } cancelAllChores(true); scheduledChores.clear(); @@ -371,7 +369,7 @@ public class ChoreService implements ChoreServicer { output.put("Chore timeBetweenRuns: ", Long.toString(chore.getTimeBetweenRuns())); for (Entry entry : output.entrySet()) { - if (LOG.isTraceEnabled()) LOG.trace(entry.getKey() + entry.getValue()); + LOG.trace("{} {}", entry.getKey(), entry.getValue()); } } @@ -387,7 +385,7 @@ public class ChoreService implements ChoreServicer { Integer.toString(getNumberOfChoresMissingStartTime())); for (Entry entry : output.entrySet()) { - if (LOG.isTraceEnabled()) LOG.trace(entry.getKey() + entry.getValue()); + LOG.trace("{} {}", entry.getKey(), entry.getValue()); } } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java index d21ee9389f..68773e6f22 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java @@ -160,8 +160,8 @@ public class HBaseConfiguration extends Configuration { isShowConf = true; } } catch (LinkageError e) { - // should we handle it more aggressively in addition to log the error?
- LOG.warn("Error thrown: ", e); + // should we handle it more aggressively in addition to log the error? + LOG.warn("Error thrown: ", e); } catch (ClassNotFoundException ce) { LOG.debug("ClassNotFound: ConfServlet"); // ignore @@ -190,8 +190,8 @@ public class HBaseConfiguration extends Configuration { public static int getInt(Configuration conf, String name, String deprecatedName, int defaultValue) { if (conf.get(deprecatedName) != null) { - LOG.warn(String.format("Config option \"%s\" is deprecated. Instead, use \"%s\"" - , deprecatedName, name)); + LOG.warn("Config option \"{}\" is deprecated. Instead, use \"{}\"", + deprecatedName, name); return conf.getInt(deprecatedName, defaultValue); } else { return conf.getInt(name, defaultValue); @@ -216,21 +216,19 @@ public class HBaseConfiguration extends Configuration { Method m = Configuration.class.getMethod("getPassword", String.class); char[] p = (char[]) m.invoke(conf, alias); if (p != null) { - LOG.debug(String.format("Config option \"%s\" was found through" + - " the Configuration getPassword method.", alias)); + LOG.debug("Config option \"{}\" was found through" + + " the Configuration getPassword method.", alias); passwd = new String(p); } else { - LOG.debug(String.format( - "Config option \"%s\" was not found. Using provided default value", - alias)); + LOG.debug("Config option \"{}\" was not found. " + + "Using provided default value", alias); passwd = defPass; } } catch (NoSuchMethodException e) { // this is a version of Hadoop where the credential //provider API doesn't exist yet - LOG.debug(String.format( - "Credential.getPassword method is not available." + - " Falling back to configuration.")); + LOG.debug("Credential.getPassword method is not available." + + " Falling back to configuration."); passwd = conf.get(alias, defPass); } catch (SecurityException e) { throw new IOException(e.getMessage(), e); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index f3bfbd3fd0..faeb53c6d2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -77,8 +77,6 @@ import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesti */ @InterfaceAudience.Private public class KeyValue implements ExtendedCell { - private static final ArrayList EMPTY_ARRAY_LIST = new ArrayList<>(); - private static final Logger LOG = LoggerFactory.getLogger(KeyValue.class); public static final int FIXED_OVERHEAD = ClassSize.OBJECT + // the KeyValue object itself @@ -2132,14 +2130,14 @@ public class KeyValue implements ExtendedCell { public byte[] calcIndexKey(byte[] lastKeyOfPreviousBlock, byte[] firstKeyInBlock) { byte[] fakeKey = getShortMidpointKey(lastKeyOfPreviousBlock, firstKeyInBlock); if (compareFlatKey(fakeKey, firstKeyInBlock) > 0) { - LOG.error("Unexpected getShortMidpointKey result, fakeKey:" - + Bytes.toStringBinary(fakeKey) + ", firstKeyInBlock:" - + Bytes.toStringBinary(firstKeyInBlock)); + LOG.error("Unexpected getShortMidpointKey result, fakeKey:{}" + + ", firstKeyInBlock:{}", Bytes.toStringBinary(fakeKey), + Bytes.toStringBinary(firstKeyInBlock)); return firstKeyInBlock; } if (lastKeyOfPreviousBlock != null && compareFlatKey(lastKeyOfPreviousBlock, fakeKey) >= 0) { - LOG.error("Unexpected getShortMidpointKey result, lastKeyOfPreviousBlock:" + - Bytes.toStringBinary(lastKeyOfPreviousBlock) + ", fakeKey:" + + LOG.error("Unexpected 
getShortMidpointKey result, lastKeyOfPreviousBlock:{}" + + ", fakeKey:{}", Bytes.toStringBinary(lastKeyOfPreviousBlock), Bytes.toStringBinary(fakeKey)); return firstKeyInBlock; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java index 468b5d30c3..64693b1bf9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java @@ -173,11 +173,11 @@ public abstract class ScheduledChore implements Runnable { updateTimeTrackingBeforeRun(); if (missedStartTime() && isScheduled()) { onChoreMissedStartTime(); - if (LOG.isInfoEnabled()) LOG.info("Chore: " + getName() + " missed its start time"); + LOG.info("Chore: {} missed its start time", getName()); } else if (stopper.isStopped() || !isScheduled()) { cancel(false); cleanup(); - if (LOG.isInfoEnabled()) LOG.info("Chore: " + getName() + " was stopped"); + LOG.info("Chore: {} was stopped", getName()); } else { try { if (!initialChoreComplete) { @@ -186,7 +186,7 @@ public abstract class ScheduledChore implements Runnable { chore(); } } catch (Throwable t) { - if (LOG.isErrorEnabled()) LOG.error("Caught error", t); + LOG.error("Caught error", t); if (this.stopper.isStopped()) { cancel(false); cleanup(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java index e1a96bdcf4..184d324793 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java @@ -80,10 +80,10 @@ public abstract class BaseDecoder implements Codec.Decoder { } catch (Throwable t) { LOG.trace("Error getting available for error message - ignoring", t); } - if (!isEof) throw ioEx; - if (LOG.isTraceEnabled()) { - LOG.trace("Partial cell read caused by EOF", ioEx); + if (!isEof) { + throw ioEx; } + LOG.trace("Partial cell read caused by EOF", ioEx); EOFException eofEx = new EOFException("Partial cell read"); eofEx.initCause(ioEx); throw eofEx; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java index caca20b5ce..107039ac1e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java @@ -104,15 +104,13 @@ public class ByteBufferPool { int c = this.count.intValue(); if (c >= this.maxPoolSize) { if (maxPoolSizeInfoLevelLogged) { - if (LOG.isDebugEnabled()) { - LOG.debug("Pool already reached its max capacity : " + this.maxPoolSize - + " and no free buffers now. Consider increasing the value for '" - + MAX_POOL_SIZE_KEY + "' ?"); - } + LOG.debug("Pool already reached its max capacity : {} and no free " + + "buffers now. Consider increasing the value for '{}' ?", + maxPoolSize, MAX_POOL_SIZE_KEY); } else { - LOG.info("Pool already reached its max capacity : " + this.maxPoolSize - + " and no free buffers now. Consider increasing the value for '" + MAX_POOL_SIZE_KEY - + "' ?"); + LOG.info("Pool already reached its max capacity : {} and no free " + + "buffers now. 
Consider increasing the value for '{}' ?", + maxPoolSize, MAX_POOL_SIZE_KEY); maxPoolSizeInfoLevelLogged = true; } return null; @@ -120,9 +118,7 @@ public class ByteBufferPool { if (!this.count.compareAndSet(c, c + 1)) { continue; } - if (LOG.isTraceEnabled()) { - LOG.trace("Creating a new offheap ByteBuffer of size: " + this.bufferSize); - } + LOG.trace("Creating a new offheap ByteBuffer of size: {}", bufferSize); return this.directByteBuffer ? ByteBuffer.allocateDirect(this.bufferSize) : ByteBuffer.allocate(this.bufferSize); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java index d258ba2927..893fc9034b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java @@ -353,7 +353,7 @@ public final class Compression { CompressionCodec codec = getCodec(conf); if (codec != null) { Compressor compressor = CodecPool.getCompressor(codec); - if (LOG.isTraceEnabled()) LOG.trace("Retrieved compressor " + compressor + " from pool."); + LOG.trace("Retrieved compressor {} from pool.", compressor); if (compressor != null) { if (compressor.finished()) { // Somebody returns the compressor to CodecPool but is still using it. @@ -368,7 +368,7 @@ public final class Compression { public void returnCompressor(Compressor compressor) { if (compressor != null) { - if (LOG.isTraceEnabled()) LOG.trace("Returning compressor " + compressor + " to pool."); + LOG.trace("Returning compressor {} to pool.", compressor); CodecPool.returnCompressor(compressor); } } @@ -377,7 +377,7 @@ public final class Compression { CompressionCodec codec = getCodec(conf); if (codec != null) { Decompressor decompressor = CodecPool.getDecompressor(codec); - if (LOG.isTraceEnabled()) LOG.trace("Retrieved decompressor " + decompressor + " from pool."); + LOG.trace("Retrieved decompressor {} from pool.", decompressor); if (decompressor != null) { if (decompressor.finished()) { // Somebody returns the decompressor to CodecPool but is still using it. @@ -393,10 +393,10 @@ public final class Compression { public void returnDecompressor(Decompressor decompressor) { if (decompressor != null) { - if (LOG.isTraceEnabled()) LOG.trace("Returning decompressor " + decompressor + " to pool."); + LOG.trace("Returning decompressor {} to pool.", decompressor); CodecPool.returnDecompressor(decompressor); if (decompressor.getClass().isAnnotationPresent(DoNotPool.class)) { - if (LOG.isTraceEnabled()) LOG.trace("Ending decompressor " + decompressor); + LOG.trace("Ending decompressor {}", decompressor); decompressor.end(); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java index af0089d02c..afb0dcca00 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java @@ -483,10 +483,10 @@ public final class Encryption { String alternateAlgorithm = conf.get(HConstants.CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY); if (alternateAlgorithm != null) { if (LOG.isDebugEnabled()) { - LOG.debug("Unable to decrypt data with current cipher algorithm '" - + conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES) - + "'. 
Trying with the alternate cipher algorithm '" + alternateAlgorithm - + "' configured."); + LOG.debug("Unable to decrypt data with current cipher algorithm '{}'. " + + "Trying with the alternate cipher algorithm '{}' configured.", + conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES), + alternateAlgorithm); } Cipher alterCipher = Encryption.getCipher(conf, alternateAlgorithm); if (alterCipher == null) { @@ -547,9 +547,7 @@ public final class Encryption { getClassLoaderForClass(KeyProvider.class).loadClass(providerClassName), conf); provider.init(providerParameters); - if (LOG.isDebugEnabled()) { - LOG.debug("Installed " + providerClassName + " into key provider cache"); - } + LOG.debug("Installed {} into key provider cache", providerClassName); keyProviderCache.put(providerCacheKey, provider); return provider; } catch (Exception e) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java index 7dbbdba980..92b47035ca 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java @@ -79,9 +79,8 @@ public class RowIndexEncoderV1 { } out.writeInt(onDiskDataSize); if (LOG.isTraceEnabled()) { - LOG.trace("RowNumber: " + rowsOffsetBAOS.size() / 4 - + ", onDiskDataSize: " + onDiskDataSize + ", totalOnDiskSize: " - + (out.size() - startOffset)); + LOG.trace("RowNumber: {}, onDiskDataSize: {}, totalOnDiskSize: {}", + rowsOffsetBAOS.size() / 4, onDiskDataSize, out.size() - startOffset); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java index 03d03d9fe4..6f26de0a4a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java @@ -37,14 +37,14 @@ public class HBaseHTraceConfiguration extends HTraceConfiguration { String newKey = KEY_PREFIX + key; String oldValue = conf.get(oldKey); if (oldValue != null) { - LOG.warn("Warning: using deprecated configuration key " + oldKey + - ". Please use " + newKey + " instead."); + LOG.warn("Warning: using deprecated configuration key {}. " + + "Please use {} instead.", oldKey, newKey); String newValue = conf.get(newKey); if (newValue == null) { conf.set(newKey, oldValue); } else { - LOG.warn("Conflicting values for " + newKey + " and " + oldKey + - ". Using " + newValue); + LOG.warn("Conflicting values for {} and {}. 
Using {}", + newKey, oldKey, newValue); } } } @@ -69,8 +69,7 @@ public class HBaseHTraceConfiguration extends HTraceConfiguration { @Override public String get(String key, String defaultValue) { - return conf.get(KEY_PREFIX + key,defaultValue); - + return conf.get(KEY_PREFIX + key, defaultValue); } @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java index 14ef945d75..ff1d0605fd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java @@ -93,7 +93,7 @@ public class SpanReceiverHost { SpanReceiver receiver = builder.className(className).build(); if (receiver != null) { receivers.add(receiver); - LOG.info("SpanReceiver " + className + " was loaded successfully."); + LOG.info("SpanReceiver {} was loaded successfully.", className); } } for (SpanReceiver rcvr : receivers) { @@ -111,7 +111,7 @@ public class SpanReceiverHost { try { rcvr.close(); } catch (IOException e) { - LOG.warn("Unable to close SpanReceiver correctly: " + e.getMessage(), e); + LOG.warn("Unable to close SpanReceiver correctly", e); } } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java index f3ac52b8e2..0a9beac145 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java @@ -834,12 +834,11 @@ public class Base64 { return 3; } catch (Exception e) { - LOG.error("error decoding bytes at " + source[srcOffset] + ": " + - (DECODABET[source[srcOffset]]) + ", " + source[srcOffset + 1] + - ": " + (DECODABET[source[srcOffset + 1]]) + ", " + - source[srcOffset + 2] + ": " + (DECODABET[source[srcOffset + 2]]) + - ", " + source[srcOffset + 3] + ": " + - (DECODABET[source[srcOffset + 3]]), e); + LOG.error("error decoding bytes at {}: {}, {}: {}, {}: {}, {}: {}", + source[srcOffset], DECODABET[source[srcOffset]], + source[srcOffset + 1], DECODABET[source[srcOffset + 1]], + source[srcOffset + 2], DECODABET[source[srcOffset + 2]], + source[srcOffset + 3], DECODABET[source[srcOffset + 3]], e); return -1; } // end catch } @@ -888,8 +887,7 @@ public class Base64 { } // end if: quartet built } // end if: equals sign or better } else { - LOG.error("Bad Base64 input character at " + i + ": " + source[i] + - "(decimal)"); + LOG.error("Bad Base64 input character at {}: {} (decimal)", i, source[i]); return null; } // end else: } // each input character diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java index 2e14b13a2b..22b5c93e14 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java @@ -67,9 +67,11 @@ public class ByteBufferArray { if (this.bufferSize > (capacity / 16)) this.bufferSize = (int) roundUp(capacity / 16, 32768); this.bufferCount = (int) (roundUp(capacity, bufferSize) / bufferSize); - LOG.info("Allocating buffers total=" + StringUtils.byteDesc(capacity) - + ", sizePerBuffer=" + StringUtils.byteDesc(bufferSize) + ", count=" - + bufferCount); + if (LOG.isInfoEnabled()) { + LOG.info("Allocating buffers total={}, sizePerBuffer={}, count={}", + 
StringUtils.byteDesc(capacity), StringUtils.byteDesc(bufferSize), + bufferCount); + } buffers = new ByteBuffer[bufferCount + 1]; createBuffers(allocator); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java index 6f88c005cb..7286b14d4b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java @@ -390,11 +390,9 @@ public class ClassSize { else if (name.equals("double") || name.equals("D")) primitives += Bytes.SIZEOF_DOUBLE; } - if (debug) { - if (LOG.isDebugEnabled()) { - LOG.debug("" + index + " " + aField.getName() + " " + aField.getType()); - } - } + if (debug) { + LOG.debug("{} {} {}", index, aField.getName(), aField.getType()); + } index++; } } @@ -417,11 +413,9 @@ public class ClassSize { // Round up to a multiple of 8 long size = align(prealign_size) + align(coeff[1] * ARRAY); if (debug) { - if (LOG.isDebugEnabled()) { - LOG.debug("Primitives=" + coeff[0] + ", arrays=" + coeff[1] + - ", references=" + coeff[2] + ", refSize " + REFERENCE + - ", size=" + size + ", prealign_size=" + prealign_size); - } + LOG.debug("Primitives={}, arrays={}, references={}, refSize={}, " + + "size={}, prealign_size={}", coeff[0], coeff[1], coeff[2], + REFERENCE, size, prealign_size); } return size; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java index 5b46de9733..660313b84d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java @@ -243,9 +243,8 @@ public abstract class CommonFSUtils { */ public static FSDataOutputStream create(FileSystem fs, Path path, FsPermission perm, boolean overwrite) throws IOException { - if (LOG.isTraceEnabled()) { - LOG.trace("Creating file=" + path + " with permission=" + perm + ", overwrite=" + overwrite); - } + LOG.trace("Creating file={} with permission={}, overwrite={}", + path, perm, overwrite); return fs.create(path, perm, overwrite, getDefaultBufferSize(fs), getDefaultReplication(fs, path), getDefaultBlockSize(fs, path), null); } @@ -280,10 +279,8 @@ public abstract class CommonFSUtils { FsPermission umask = new FsPermission(mask); return perm.applyUMask(umask); } catch (IllegalArgumentException e) { - LOG.warn( - "Incorrect umask attempted to be created: " - + conf.get(permssionConfKey) - + ", using default file permissions.", e); + LOG.warn("Incorrect umask attempted to be created: {}, using " + + "default file permissions.", conf.get(permssionConfKey), e); return FsPermission.getFileDefault(); } } @@ -476,9 +473,7 @@ public abstract class CommonFSUtils { final Path path, final String policyKey, final String defaultPolicy) { String storagePolicy = conf.get(policyKey, defaultPolicy).toUpperCase(Locale.ROOT); if (storagePolicy.equals(defaultPolicy)) { - if (LOG.isTraceEnabled()) { - LOG.trace("default policy of " + defaultPolicy + " requested, exiting early."); - } + LOG.trace("default policy of {} requested, exiting early.", defaultPolicy); return; } setStoragePolicy(fs, path, storagePolicy); } @@ -509,16 +504,12 @@ public abstract class CommonFSUtils { public static void setStoragePolicy(final FileSystem fs, final Path path, final String storagePolicy) { if (storagePolicy == null) { - if (LOG.isTraceEnabled()) { - LOG.trace("We were passed a null
storagePolicy, exiting early."); - } + LOG.trace("We were passed a null storagePolicy, exiting early."); return; } final String trimmedStoragePolicy = storagePolicy.trim(); if (trimmedStoragePolicy.isEmpty()) { - if (LOG.isTraceEnabled()) { - LOG.trace("We were passed an empty storagePolicy, exiting early."); - } + LOG.trace("We were passed an empty storagePolicy, exiting early."); return; } invokeSetStoragePolicy(fs, path, trimmedStoragePolicy); @@ -561,18 +552,16 @@ public abstract class CommonFSUtils { if (m != null) { try { m.invoke(fs, path, storagePolicy); - if (LOG.isDebugEnabled()) { - LOG.debug("Set storagePolicy=" + storagePolicy + " for path=" + path); - } + LOG.debug("Set storagePolicy={} for path={}", storagePolicy, path); } catch (Exception e) { // This swallows FNFE, should we be throwing it? seems more likely to indicate dev // misuse than a runtime problem with HDFS. if (!warningMap.containsKey(fs)) { warningMap.put(fs, true); - LOG.warn("Unable to set storagePolicy=" + storagePolicy + " for path=" + path + ". " + - "DEBUG log level might have more details.", e); + LOG.warn("Unable to set storagePolicy={} for path={}. " + + "DEBUG log level might have more details.", storagePolicy, path, e); } else if (LOG.isDebugEnabled()) { - LOG.debug("Unable to set storagePolicy=" + storagePolicy + " for path=" + path, e); + LOG.debug("Unable to set storagePolicy={} for path={}", storagePolicy, path, e); } // check for lack of HDFS-7228 if (e instanceof InvocationTargetException) { @@ -580,25 +569,22 @@ public abstract class CommonFSUtils { if (exception instanceof RemoteException && HadoopIllegalArgumentException.class.getName().equals( ((RemoteException)exception).getClassName())) { - if (LOG.isDebugEnabled()) { - LOG.debug("Given storage policy, '" +storagePolicy +"', was rejected and probably " + - "isn't a valid policy for the version of Hadoop you're running. I.e. if you're " + - "trying to use SSD related policies then you're likely missing HDFS-7228. For " + - "more information see the 'ArchivalStorage' docs for your Hadoop release."); - } + LOG.debug("Given storage policy, '{}', was rejected and probably " + + "isn't a valid policy for the version of Hadoop you're running. I.e. if you're " + + "trying to use SSD related policies then you're likely missing HDFS-7228. For " + + "more information see the 'ArchivalStorage' docs for your Hadoop release.", + storagePolicy); // Hadoop 2.8+, 3.0-a1+ added FileSystem.setStoragePolicy with a default implementation // that throws UnsupportedOperationException } else if (exception instanceof UnsupportedOperationException) { - if (LOG.isDebugEnabled()) { - LOG.debug("The underlying FileSystem implementation doesn't support " + - "setStoragePolicy. This is probably intentional on their part, since HDFS-9345 " + - "appears to be present in your version of Hadoop. For more information check " + - "the Hadoop documentation on 'ArchivalStorage', the Hadoop FileSystem " + - "specification docs from HADOOP-11981, and/or related documentation from the " + - "provider of the underlying FileSystem (its name should appear in the " + - "stacktrace that accompanies this message). Note in particular that Hadoop's " + - "local filesystem implementation doesn't support storage policies.", exception); - } + LOG.debug("The underlying FileSystem implementation doesn't support " + + "setStoragePolicy. This is probably intentional on their part, since HDFS-9345 " + + "appears to be present in your version of Hadoop. 
For more information check " + + "the Hadoop documentation on 'ArchivalStorage', the Hadoop FileSystem " + + "specification docs from HADOOP-11981, and/or related documentation from the " + + "provider of the underlying FileSystem (its name should appear in the " + + "stacktrace that accompanies this message). Note in particular that Hadoop's " + + "local filesystem implementation doesn't support storage policies.", exception); } } } @@ -656,9 +642,7 @@ public abstract class CommonFSUtils { status = filter == null ? fs.listStatus(dir) : fs.listStatus(dir, filter); } catch (FileNotFoundException fnfe) { // if directory doesn't exist, return null - if (LOG.isTraceEnabled()) { - LOG.trace(dir + " doesn't exist"); - } + LOG.trace("{} doesn't exist", dir); } if (status == null || status.length < 1) { return null; @@ -699,9 +683,7 @@ public abstract class CommonFSUtils { } } catch (FileNotFoundException fnfe) { // if directory doesn't exist, return null - if (LOG.isTraceEnabled()) { - LOG.trace(dir + " doesn't exist"); - } + LOG.trace("{} doesn't exist", dir); } return status; } @@ -741,8 +723,10 @@ public abstract class CommonFSUtils { */ public static void logFileSystemState(final FileSystem fs, final Path root, Logger LOG) throws IOException { - LOG.debug("File system contents for path " + root); - logFSTree(LOG, fs, root, "|-"); + if (LOG.isDebugEnabled()) { + LOG.debug("File system contents for path {}", root); + logFSTree(LOG, fs, root, "|-"); + } } /** @@ -759,10 +743,10 @@ public abstract class CommonFSUtils { for (FileStatus file : files) { if (file.isDirectory()) { - LOG.debug(prefix + file.getPath().getName() + "/"); + LOG.debug("{}{}/", prefix, file.getPath().getName()); logFSTree(LOG, fs, file.getPath(), prefix + "---"); } else { - LOG.debug(prefix + file.getPath().getName()); + LOG.debug("{}{}", prefix, file.getPath().getName()); } } } @@ -786,7 +770,8 @@ public abstract class CommonFSUtils { boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true); if (shortCircuitSkipChecksum) { LOG.warn("Configuration \"dfs.client.read.shortcircuit.skip.checksum\" should not " + - "be set to true." + (useHBaseChecksum ? " HBase checksum doesn't require " + + "be set to true.{}", + (useHBaseChecksum ? " HBase checksum doesn't require " + "it, see https://issues.apache.org/jira/browse/HBASE-6868." 
: "")); assert !shortCircuitSkipChecksum; //this will fail if assertions are on } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java index f1589ba093..5e3609810e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java @@ -243,7 +243,7 @@ public class CoprocessorClassLoader extends ClassLoaderBase { CoprocessorClassLoader cl = getIfCached(path); String pathStr = path.toString(); if (cl != null) { - LOG.debug("Found classloader "+ cl + " for "+ pathStr); + LOG.debug("Found classloader {} for {}", cl, pathStr); return cl; } @@ -255,7 +255,7 @@ public class CoprocessorClassLoader extends ClassLoaderBase { try { cl = getIfCached(path); if (cl != null) { - LOG.debug("Found classloader "+ cl + " for "+ pathStr); + LOG.debug("Found classloader {} for {}", cl, pathStr); return cl; } @@ -273,8 +273,8 @@ public class CoprocessorClassLoader extends ClassLoaderBase { CoprocessorClassLoader prev = classLoadersCache.putIfAbsent(path, cl); if (prev != null) { // Lost update race, use already added class loader - LOG.warn("THIS SHOULD NOT HAPPEN, a class loader" - +" is already cached for " + pathStr); + LOG.warn("THIS SHOULD NOT HAPPEN, a class loader is already cached for {}", + pathStr); cl = prev; } return cl; @@ -293,10 +293,7 @@ public class CoprocessorClassLoader extends ClassLoaderBase { throws ClassNotFoundException { // Delegate to the parent immediately if this class is exempt if (isClassExempt(name, includedClassPrefixes)) { - if (LOG.isDebugEnabled()) { - LOG.debug("Skipping exempt class " + name + - " - delegating directly to parent"); - } + LOG.debug("Skipping exempt class {} - delegating directly to parent", name); return parent.loadClass(name); } @@ -304,30 +301,22 @@ public class CoprocessorClassLoader extends ClassLoaderBase { // Check whether the class has already been loaded: Class clasz = findLoadedClass(name); if (clasz != null) { - if (LOG.isDebugEnabled()) { - LOG.debug("Class " + name + " already loaded"); - } + LOG.debug("Class {} already loaded", name); } else { try { // Try to find this class using the URLs passed to this ClassLoader - if (LOG.isDebugEnabled()) { - LOG.debug("Finding class: " + name); - } + LOG.debug("Finding class: {}", name); clasz = findClass(name); } catch (ClassNotFoundException e) { // Class not found using this ClassLoader, so delegate to parent - if (LOG.isDebugEnabled()) { - LOG.debug("Class " + name + " not found - delegating to parent"); - } + LOG.debug("Class {} not found - delegating to parent", name); try { clasz = parent.loadClass(name); } catch (ClassNotFoundException e2) { // Class not found in this ClassLoader or in the parent ClassLoader // Log some debug output before re-throwing ClassNotFoundException - if (LOG.isDebugEnabled()) { - LOG.debug("Class " + name + " not found in parent loader"); - } + LOG.debug("Class {} not found in parent loader", name); throw e2; } } @@ -343,9 +332,7 @@ public class CoprocessorClassLoader extends ClassLoaderBase { // Delegate to the parent first if necessary if (loadResourceUsingParentFirst(name)) { - if (LOG.isDebugEnabled()) { - LOG.debug("Checking parent first for resource " + name); - } + LOG.debug("Checking parent first for resource {}", name); resource = super.getResource(name); parentLoaded = true; } diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java index 28fce21b1c..cdab12a5a8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java @@ -119,8 +119,7 @@ public class DynamicClassLoader extends ClassLoaderBase { try { remoteDirFs = remoteDir.getFileSystem(conf); } catch (IOException ioe) { - LOG.warn("Failed to identify the fs of dir " - + remoteDir + ", ignored", ioe); + LOG.warn("Failed to identify the fs of dir {}, ignored", remoteDir, ioe); remoteDir = null; } } @@ -132,9 +131,7 @@ public class DynamicClassLoader extends ClassLoaderBase { try { return parent.loadClass(name); } catch (ClassNotFoundException e) { - if (LOG.isDebugEnabled()) { - LOG.debug("Class " + name + " not found - using dynamical class loader"); - } + LOG.debug("Class {} not found - using dynamical class loader", name); if (useDynamicJars) { return tryRefreshClass(name); @@ -150,26 +147,18 @@ public class DynamicClassLoader extends ClassLoaderBase { // Check whether the class has already been loaded: Class clasz = findLoadedClass(name); if (clasz != null) { - if (LOG.isDebugEnabled()) { - LOG.debug("Class " + name + " already loaded"); - } + LOG.debug("Class {} already loaded", name); } else { try { - if (LOG.isDebugEnabled()) { - LOG.debug("Finding class: " + name); - } + LOG.debug("Finding class: {}", name); clasz = findClass(name); } catch (ClassNotFoundException cnfe) { // Load new jar files if any - if (LOG.isDebugEnabled()) { - LOG.debug("Loading new jar files, if any"); - } + LOG.debug("Loading new jar files, if any"); loadNewJars(); - if (LOG.isDebugEnabled()) { - LOG.debug("Finding class again: " + name); - } + LOG.debug("Finding class again: {}", name); clasz = findClass(name); } } @@ -193,7 +182,7 @@ public class DynamicClassLoader extends ClassLoaderBase { addURL(url); } catch (MalformedURLException mue) { // This should not happen, just log it - LOG.warn("Failed to load new jar " + fileName, mue); + LOG.warn("Failed to load new jar {}", fileName, mue); } } } @@ -205,7 +194,7 @@ public class DynamicClassLoader extends ClassLoaderBase { try { statuses = remoteDirFs.listStatus(remoteDir); } catch (IOException ioe) { - LOG.warn("Failed to check remote dir status " + remoteDir, ioe); + LOG.warn("Failed to check remote dir status {}", remoteDir, ioe); } } if (statuses == null || statuses.length == 0) { @@ -217,9 +206,7 @@ public class DynamicClassLoader extends ClassLoaderBase { Path path = status.getPath(); String fileName = path.getName(); if (!fileName.endsWith(".jar")) { - if (LOG.isDebugEnabled()) { - LOG.debug("Ignored non-jar file " + fileName); - } + LOG.debug("Ignored non-jar file {}", fileName); continue; // Ignore non-jar files } Long cachedLastModificationTime = jarModifiedTime.get(fileName); @@ -244,7 +231,7 @@ public class DynamicClassLoader extends ClassLoaderBase { URL url = dst.toURI().toURL(); addURL(url); } catch (IOException ioe) { - LOG.warn("Failed to load new jar " + fileName, ioe); + LOG.warn("Failed to load new jar {}", fileName, ioe); } } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java index 80ffa270d0..95b4cc5c6a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java +++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java @@ -106,7 +106,7 @@ public class JSONBean { final MBeanServer mBeanServer, ObjectName qry, String attribute, final boolean description) throws IOException { - LOG.trace("Listing beans for "+qry); + LOG.trace("Listing beans for {}", qry); Set names = null; names = mBeanServer.queryNames(qry, null); jg.writeArrayFieldStart("beans"); @@ -135,35 +135,29 @@ public class JSONBean { // UnsupportedOperationExceptions happen in the normal course of business, // so no need to log them as errors all the time. if (e.getCause() instanceof UnsupportedOperationException) { - if (LOG.isTraceEnabled()) { - LOG.trace("Getting attribute " + prs + " of " + oname + " threw " + e); - } + LOG.trace("Getting attribute {} of {} threw {}", prs, oname, e.toString()); } else { - LOG.error("Getting attribute " + prs + " of " + oname + " threw an exception", e); + LOG.error("Getting attribute {} of {} threw an exception", prs, oname, e); } return 0; } catch (AttributeNotFoundException e) { // If the modelerType attribute was not found, the class name is used // instead. - LOG.error("getting attribute " + prs + " of " + oname - + " threw an exception", e); + LOG.error("Getting attribute {} of {} threw an exception", prs, oname, e); } catch (MBeanException e) { // The code inside the attribute getter threw an exception so log it, // and fall back on the class name - LOG.error("getting attribute " + prs + " of " + oname - + " threw an exception", e); + LOG.error("Getting attribute {} of {} threw an exception", prs, oname, e); } catch (RuntimeException e) { // For some reason even with an MBeanException available to them // Runtime exceptionscan still find their way through, so treat them // the same as MBeanException - LOG.error("getting attribute " + prs + " of " + oname - + " threw an exception", e); + LOG.error("Getting attribute {} of {} threw an exception", prs, oname, e); } catch (ReflectionException e) { // This happens when the code inside the JMX bean (setter?? from the // java docs) threw an exception, so log it and fall back on the // class name - LOG.error("getting attribute " + prs + " of " + oname - + " threw an exception", e); + LOG.error("Getting attribute {} of {} threw an exception", prs, oname, e); } } catch (InstanceNotFoundException e) { //Ignored for some reason the bean was not found so don't output it } catch (IntrospectionException e) { // This is an internal error, something odd happened with reflection so // log it and don't output the bean. - LOG.error("Problem while trying to process JMX query: " + qry - + " with MBean " + oname, e); + LOG.error("Problem while trying to process JMX query: {} with MBean {}", + qry, oname, e); continue; } catch (ReflectionException e) { // This happens when the code inside the JMX bean threw an exception, so // log it and don't output the bean. - LOG.error("Problem while trying to process JMX query: " + qry - + " with MBean " + oname, e); + LOG.error("Problem while trying to process JMX query: {} with MBean {}", + qry, oname, e); continue; } @@ -233,17 +227,15 @@ public class JSONBean { // UnsupportedOperationExceptions happen in the normal course of business, // so no need to log them as errors all the time.
if (e.getCause() instanceof UnsupportedOperationException) { - if (LOG.isTraceEnabled()) { - LOG.trace("Getting attribute " + attName + " of " + oname + " threw " + e); - } + LOG.trace("Getting attribute {} of {} threw {}", attName, oname, e.toString()); } else { - LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e); + LOG.error("Getting attribute {} of {} threw an exception", attName, oname, e); } return; } catch (RuntimeErrorException e) { // RuntimeErrorException happens when an unexpected failure occurs in getAttribute // for example https://issues.apache.org/jira/browse/DAEMON-120 - LOG.debug("getting attribute "+attName+" of "+oname+" threw an exception", e); + LOG.debug("Getting attribute {} of {} threw an exception", attName, oname, e); return; } catch (AttributeNotFoundException e) { //Ignored the attribute was not found, which should never happen because the bean @@ -253,17 +245,17 @@ public class JSONBean { } catch (MBeanException e) { //The code inside the attribute getter threw an exception so log it, and // skip outputting the attribute - LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e); + LOG.error("Getting attribute {} of {} threw an exception", attName, oname, e); return; } catch (RuntimeException e) { //For some reason even with an MBeanException available to them Runtime exceptions //can still find their way through, so treat them the same as MBeanException - LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e); + LOG.error("Getting attribute {} of {} threw an exception", attName, oname, e); return; } catch (ReflectionException e) { //This happens when the code inside the JMX bean (setter?? from the java docs) //threw an exception, so log it and skip outputting the attribute - LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e); + LOG.error("Getting attribute {} of {} threw an exception", attName, oname, e); return; } catch (InstanceNotFoundException e) { //Ignored the mbean itself was not found, which should never happen because we diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java index 7bc2257e35..c92668b26b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java @@ -80,10 +80,9 @@ public final class JSONMetricUtil { Object value = null; try { value = mbServer.getAttribute(bean, attribute); - } - catch(Exception e) { - LOG.error("Unable to get value from MBean= "+ bean.toString() + - "for attribute=" + attribute + " " + e.getMessage()); + } catch(Exception e) { + LOG.error("Unable to get value from MBean={} for attribute={} {}", + bean, attribute, e.getMessage()); } return value; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java index c38f1a9f8b..f2c01e9040 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java @@ -24,8 +24,6 @@ import java.security.NoSuchAlgorithmException; import org.apache.commons.codec.binary.Hex; import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Utility class for MD5 @@ -33,8 +31,6 @@ import org.slf4j.LoggerFactory; */ @InterfaceAudience.Public 
public class MD5Hash { - private static final Logger LOG = LoggerFactory.getLogger(MD5Hash.class); - /** * Given a byte array, returns in MD5 hash as a hex string. * @param key @@ -43,7 +39,7 @@ public class MD5Hash { public static String getMD5AsHex(byte[] key) { return getMD5AsHex(key, 0, key.length); } - + /** * Given a byte array, returns its MD5 hash as a hex string. * Only "length" number of bytes starting at "offset" within the @@ -51,7 +47,7 @@ public class MD5Hash { * * @param key the key to hash (variable length byte array) * @param offset - * @param length + * @param length * @return MD5 hash as a 32 character hex string. */ public static String getMD5AsHex(byte[] key, int offset, int length) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java index 6e472a0714..7549866642 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java @@ -39,8 +39,9 @@ public class Methods { Method m = clazz.getMethod(methodName, types); return m.invoke(instance, args); } catch (IllegalArgumentException arge) { - LOG.error(HBaseMarkers.FATAL, "Constructed invalid call. class="+clazz.getName()+ - " method=" + methodName + " types=" + Classes.stringify(types), arge); + LOG.error(HBaseMarkers.FATAL, + "Constructed invalid call. class={} method={} types={}", + clazz.getName(), methodName, Classes.stringify(types), arge); throw arge; } catch (NoSuchMethodException nsme) { throw new IllegalArgumentException( @@ -60,9 +61,9 @@ public class Methods { throw new IllegalArgumentException( "Denied access calling "+clazz.getName()+"."+methodName+"()", iae); } catch (SecurityException se) { - LOG.error(HBaseMarkers.FATAL, "SecurityException calling method. class="+ - clazz.getName()+" method=" + methodName + " types=" + - Classes.stringify(types), se); + LOG.error(HBaseMarkers.FATAL, + "SecurityException calling method. 
class={} method={} types={}", + clazz.getName(), methodName, Classes.stringify(types), se); throw se; } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java index a136846a92..67f75a1656 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java @@ -130,8 +130,8 @@ public class ReflectionUtils { printThreadInfo(new PrintStream(buffer, false, "UTF-8"), title); log.info(buffer.toString(Charset.defaultCharset().name())); } catch (UnsupportedEncodingException ignored) { - log.warn("Could not write thread info about '" + title + - "' due to a string encoding issue."); + log.warn("Could not write thread info about '{}' due to a string " + + "encoding issue.", title); } } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java index 7d4d692e1a..7690e50dfa 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java @@ -79,9 +79,8 @@ public class Sleeper { long now = System.currentTimeMillis(); long waitTime = this.period - (now - startTime); if (waitTime > this.period) { - LOG.warn("Calculated wait time > " + this.period + - "; setting to this.period: " + System.currentTimeMillis() + ", " + - startTime); + LOG.warn("Calculated wait time > {}; setting to this.period: {}, {}", + period, System.currentTimeMillis(), startTime); waitTime = this.period; } while (waitTime > 0) { @@ -94,10 +93,10 @@ public class Sleeper { woke = System.currentTimeMillis(); long slept = woke - now; if (slept - this.period > MINIMAL_DELTA_FOR_LOGGING) { - LOG.warn("We slept " + slept + "ms instead of " + this.period + - "ms, this is likely due to a long " + + LOG.warn("We slept {}ms instead of {}ms, this is likely due to a long " + "garbage collecting pause and it's usually bad, see " + - "http://hbase.apache.org/book.html#trouble.rs.runtime.zkexpired"); + "http://hbase.apache.org/book.html#trouble.rs.runtime.zkexpired", + slept, period); } } catch(InterruptedException iex) { // We we interrupted because we're meant to stop? 
If not, just diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java index 352734064a..65558d331a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java @@ -51,8 +51,7 @@ public class Threads { new UncaughtExceptionHandler() { @Override public void uncaughtException(Thread t, Throwable e) { - LOG.warn("Thread:" + t + " exited with Exception:" - + StringUtils.stringifyException(e)); + LOG.warn("Thread:{} exited with Exception:{}", t, StringUtils.stringifyException(e)); } }; @@ -112,7 +111,7 @@ public class Threads { try { t.join(joinwait); } catch (InterruptedException e) { - LOG.warn(t.getName() + "; joinwait=" + joinwait, e); + LOG.warn("{}; joinwait={}", t.getName(), joinwait, e); } } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java index 86ac06540e..a300ad3af2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java @@ -107,8 +107,10 @@ public class VersionInfo { } public static void logVersion() { - for (String line : versionReport()) { - LOG.info(line); + if (LOG.isInfoEnabled()) { + for (String line : versionReport()) { + LOG.info(line); + } } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java index 9dccb291fb..50bfb40460 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java @@ -159,7 +159,7 @@ public class ClassFinder { resourcePath = isJar ? 
matcher.group(1) : resourcePath;
       if (null == this.resourcePathFilter
           || this.resourcePathFilter.isCandidatePath(resourcePath, isJar)) {
-        LOG.debug("Looking in " + resourcePath + "; isJar=" + isJar);
+        LOG.debug("Looking in {}; isJar={}", resourcePath, isJar);
         if (isJar) {
           jars.add(resourcePath);
         } else {
@@ -185,7 +185,7 @@ public class ClassFinder {
     try {
       jarFile = new JarInputStream(new FileInputStream(jarFileName));
     } catch (IOException ioEx) {
-      LOG.warn("Failed to look for classes in " + jarFileName + ": " + ioEx);
+      LOG.warn("Failed to look for classes in {}", jarFileName);
       throw ioEx;
     }

@@ -199,7 +199,7 @@ public class ClassFinder {
         if (!proceedOnExceptions) {
           throw ioEx;
         }
-        LOG.warn("Failed to get next entry from " + jarFileName + ": " + ioEx);
+        LOG.warn("Failed to get next entry from {}", jarFileName, ioEx);
         break;
       }
       if (entry == null) {
@@ -224,7 +224,7 @@ public class ClassFinder {
         Class<?> c = makeClass(className, proceedOnExceptions);
         if (c != null) {
           if (!classes.add(c)) {
-            LOG.warn("Ignoring duplicate class " + className);
+            LOG.warn("Ignoring duplicate class {}", className);
           }
         }
       }
@@ -238,13 +238,13 @@ public class ClassFinder {
       boolean proceedOnExceptions) throws ClassNotFoundException, LinkageError {
     Set<Class<?>> classes = new HashSet<>();
     if (!baseDirectory.exists()) {
-      LOG.warn(baseDirectory.getAbsolutePath() + " does not exist");
+      LOG.warn("{} does not exist", baseDirectory.getAbsolutePath());
       return classes;
     }

     File[] files = baseDirectory.listFiles(this.fileFilter);
     if (files == null) {
-      LOG.warn("Failed to get files from " + baseDirectory.getAbsolutePath());
+      LOG.warn("Failed to get files from {}", baseDirectory.getAbsolutePath());
       return classes;
     }

@@ -259,7 +259,7 @@ public class ClassFinder {
         Class<?> c = makeClass(className, proceedOnExceptions);
         if (c != null) {
           if (!classes.add(c)) {
-            LOG.warn("Ignoring duplicate class " + className);
+            LOG.warn("Ignoring duplicate class {}", className);
           }
         }
       }
@@ -277,12 +277,12 @@ public class ClassFinder {
       if (!proceedOnExceptions) {
         throw classNotFoundEx;
       }
-      LOG.debug("Failed to instantiate or check " + className + ": " + classNotFoundEx);
+      LOG.debug("Failed to instantiate or check {}", className, classNotFoundEx);
     } catch (LinkageError linkageEx) {
       if (!proceedOnExceptions) {
         throw linkageEx;
       }
-      LOG.debug("Failed to instantiate or check " + className + ": " + linkageEx);
+      LOG.debug("Failed to instantiate or check {}", className, linkageEx);
     }
     return null;
   }
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index 74f653c0da..b69d5e9c10 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -124,8 +124,7 @@ public class HBaseCommonTestingUtility {
    */
   protected Path setupDataTestDir() {
     if (this.dataTestDir != null) {
-      LOG.warn("Data test dir already setup in " +
-          dataTestDir.getAbsolutePath());
+      LOG.warn("Data test dir already setup in {}", dataTestDir.getAbsolutePath());
       return null;
     }
     Path testPath = getRandomDir();
@@ -217,9 +216,9 @@ public class HBaseCommonTestingUtility {
         if (deleteOnExit()) FileUtils.deleteDirectory(dir);
         return true;
       } catch (IOException ex) {
-        LOG.warn("Failed to delete " + dir.getAbsolutePath());
+        LOG.warn("Failed to delete {}", dir.getAbsolutePath());
       } catch (IllegalArgumentException ex) {
-        LOG.warn("Failed to delete " + dir.getAbsolutePath(), ex);
+        LOG.warn("Failed to delete {}", dir.getAbsolutePath(), ex);
       }
     } while (ntries < 30);

diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
index b42db95cfc..d4aedc46c5 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
@@ -87,7 +87,7 @@ public class ResourceChecker {
      * @param phase
      */
     abstract public int getVal(Phase phase);
-
+
    /*
     * Retrieves List of Strings which would be logged in logEndings()
     */
@@ -129,15 +129,19 @@
     for (ResourceAnalyzer ra : ras) {
       int cur = vals[i++];
       if (cur < ra.getMin()) {
-        LOG.warn(ra.getName() + "=" + cur + " is inferior to " + ra.getMin());
+        LOG.warn("{}={} is inferior to {}", ra.getName(), cur, ra.getMin());
       }
       if (cur > ra.getMax()) {
-        LOG.warn(ra.getName() + "=" + cur + " is superior to " + ra.getMax());
+        LOG.warn("{}={} is superior to {}", ra.getName(), cur, ra.getMax());
       }
     }
   }

   private void logInit() {
+    if (!LOG.isInfoEnabled()) {
+      return;
+    }
+
     int i = 0;
     StringBuilder sb = new StringBuilder();
     for (ResourceAnalyzer ra : ras) {
@@ -145,13 +149,17 @@
       if (sb.length() > 0) sb.append(", ");
       sb.append(ra.getName()).append("=").append(cur);
     }
-    LOG.info("before: " + tagLine + " " + sb);
+    LOG.info("before: {} {}", tagLine, sb);
   }

   private void logEndings() {
     assert initialValues.length == ras.size();
     assert endingValues.length == ras.size();

+    if (!LOG.isInfoEnabled()) {
+      return;
+    }
+
     int i = 0;
     StringBuilder sb = new StringBuilder();
     for (ResourceAnalyzer ra : ras) {
@@ -169,7 +177,7 @@
           sb.append(" - ").append(ra.getName()).append(" LEAK? -");
         }
       }
-    LOG.info("after: " + tagLine + " " + sb);
+    LOG.info("after: {} {}", tagLine, sb);
   }

diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
index ce838fac75..635622b9b6 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
@@ -186,7 +186,7 @@ public class TestChoreService {
     }

     private void outputTickCount() {
-      log.info("Chore: " + getName() + ". Count of chore calls: " + countOfChoreCalls);
+      log.info("Chore: {}. Count of chore calls: {}", getName(), countOfChoreCalls);
     }

     public int getCountOfChoreCalls() {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
index 0b17359eb1..e7eddcddaa 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
@@ -85,7 +85,7 @@ public class TestClassFinder {
       deleteTestDir();
     }
     assertTrue(testDir.mkdirs());
-    LOG.info("Using new, clean directory=" + testDir);
+    LOG.info("Using new, clean directory={}", testDir);
   }

   @AfterClass
@@ -148,7 +148,8 @@ public class TestClassFinder {
   public void testClassFinderFiltersByNameInJar() throws Exception {
     final long counter = testCounter.incrementAndGet();
     final String classNamePrefix = name.getMethodName();
-    LOG.info("Created jar " + createAndLoadJar("", classNamePrefix, counter));
+    String jar = createAndLoadJar("", classNamePrefix, counter);
+    LOG.info("Created jar {}", jar);

     ClassFinder.FileNameFilter notExcNameFilter = new ClassFinder.FileNameFilter() {
       @Override
@@ -168,7 +169,8 @@ public class TestClassFinder {
   public void testClassFinderFiltersByClassInJar() throws Exception {
     final long counter = testCounter.incrementAndGet();
     final String classNamePrefix = name.getMethodName();
-    LOG.info("Created jar " + createAndLoadJar("", classNamePrefix, counter));
+    String jar = createAndLoadJar("", classNamePrefix, counter);
+    LOG.info("Created jar {}", jar);

     final ClassFinder.ClassFilter notExcClassFilter = new ClassFinder.ClassFilter() {
       @Override
@@ -230,7 +232,8 @@ public class TestClassFinder {
     final long counter = testCounter.incrementAndGet();
     final String classNamePrefix = name.getMethodName();
     String pkgNameSuffix = name.getMethodName();
-    LOG.info("Created jar " + createAndLoadJar(pkgNameSuffix, classNamePrefix, counter));
+    String jar = createAndLoadJar(pkgNameSuffix, classNamePrefix, counter);
+    LOG.info("Created jar {}", jar);
     ClassFinder allClassesFinder = new ClassFinder();
     String pkgName = makePackageName(pkgNameSuffix, counter);
     Set<Class<?>> allClasses = allClassesFinder.findClasses(pkgName, false);
@@ -253,7 +256,8 @@ public class TestClassFinder {
     final long counter = testCounter.incrementAndGet();
     final String classNamePrefix = name.getMethodName();
     String pkgNameSuffix = name.getMethodName();
-    LOG.info("Created jar " + createAndLoadJar(pkgNameSuffix, classNamePrefix, counter));
+    String jar = createAndLoadJar(pkgNameSuffix, classNamePrefix, counter);
+    LOG.info("Created jar {}", jar);
     final String classNameToFilterOut = classNamePrefix + counter;
     final ClassFinder.FileNameFilter notThisFilter = new ClassFinder.FileNameFilter() {
       @Override
@@ -278,7 +282,8 @@ public class TestClassFinder {
     final long counter = testCounter.incrementAndGet();
     final String classNamePrefix = name.getMethodName();
     String pkgNameSuffix = name.getMethodName();
-    LOG.info("Created jar " + createAndLoadJar(pkgNameSuffix, classNamePrefix, counter));
+    String jar = createAndLoadJar(pkgNameSuffix, classNamePrefix, counter);
+    LOG.info("Created jar {}", jar);
     final Class<?> clazz = makeClass(pkgNameSuffix, classNamePrefix, counter);
     final ClassFinder.ClassFilter notThisFilter = new ClassFinder.ClassFilter() {
       @Override
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
index cb422c0e76..d59af937b8 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
@@ -227,7 +227,7 @@ public class TestHBaseConfiguration {
         hadoopCredentialEntryClz = Class
             .forName(HADOOP_CRED_ENTRY_CLASS_NAME);
       } catch (ClassNotFoundException e) {
-        LOG.error("Failed to load class:" + e);
+        LOG.error("Failed to load class", e);
         return false;
       }

@@ -254,7 +254,7 @@ public class TestHBaseConfiguration {
             clz.getCanonicalName());
         throw e;
       } catch (NoSuchMethodException e) {
-        LOG.error("Failed to load the " + name + ": " + e);
+        LOG.error("Failed to load the {}", name, e);
         fail("no such method: " + name + " in " + clz.getCanonicalName());
         throw e;
       }
@@ -276,16 +276,13 @@ public class TestHBaseConfiguration {
         providersObj = getProvidersMethod.invoke(hadoopCredProviderFactory,
             conf);
       } catch (IllegalArgumentException e) {
-        LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
-            ": " + e);
+        LOG.error("Failed to invoke: {}", getProvidersMethod.getName(), e);
         return null;
       } catch (IllegalAccessException e) {
-        LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
-            ": " + e);
+        LOG.error("Failed to invoke: {}", getProvidersMethod.getName(), e);
         return null;
       } catch (InvocationTargetException e) {
-        LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
-            ": " + e);
+        LOG.error("Failed to invoke: {}", getProvidersMethod.getName(), e);
         return null;
       }

diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index 167a0302f4..c437bda893 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -89,7 +89,7 @@ public class TestKeyValue {

   @Test
   public void testBasics() throws Exception {
-    LOG.info("LOWKEY: " + KeyValue.LOWESTKEY.toString());
+    LOG.info("LOWKEY: {}", KeyValue.LOWESTKEY);
     String name = "testBasics";
     check(Bytes.toBytes(name),
       Bytes.toBytes(name), Bytes.toBytes(name), 1,
@@ -111,7 +111,7 @@ public class TestKeyValue {
         row.length) == 0);
     assertTrue(CellUtil.matchingColumn(kv, family, qualifier));
     // Call toString to make sure it works.
-    LOG.info(kv.toString());
+    LOG.info("{}", kv);
   }

   @Test
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java
index 70750056b1..8ce27f4f44 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase;

 import static org.junit.Assert.fail;

-import java.text.MessageFormat;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
@@ -122,7 +120,7 @@ public final class Waiter {
     try {
       Thread.sleep((long) (getWaitForRatio(conf) * time));
     } catch (InterruptedException ex) {
-      LOG.warn(MessageFormat.format("Sleep interrupted, {0}", ex.toString()));
+      LOG.warn("Sleep interrupted, {}", ex.toString());
     }
   }

@@ -186,8 +184,8 @@ public final class Waiter {
     Boolean interrupted = false;

     try {
-      LOG.info(MessageFormat.format("Waiting up to [{0}] milli-secs(wait.for.ratio=[{1}])",
-        adjustedTimeout, getWaitForRatio(conf)));
+      LOG.info("Waiting up to [{}] milli-secs(wait.for.ratio=[{}])",
+        adjustedTimeout, getWaitForRatio(conf));
       while (!(eval = predicate.evaluate())
           && (remainderWait = mustEnd - System.currentTimeMillis()) > 0) {
         try {
@@ -202,16 +200,14 @@ public final class Waiter {
       }
       if (!eval) {
         if (interrupted) {
-          LOG.warn(MessageFormat.format("Waiting interrupted after [{0}] msec",
-            System.currentTimeMillis() - started));
+          LOG.warn("Waiting interrupted after [{}] msec",
+            System.currentTimeMillis() - started);
         } else if (failIfTimeout) {
           String msg = getExplanation(predicate);
-          fail(MessageFormat
-            .format("Waiting timed out after [{0}] msec", adjustedTimeout) + msg);
+          fail("Waiting timed out after [" + adjustedTimeout + "] msec " + msg);
         } else {
           String msg = getExplanation(predicate);
-          LOG.warn(
-            MessageFormat.format("Waiting timed out after [{0}] msec", adjustedTimeout) + msg);
+          LOG.warn("Waiting timed out after [{}] msec {}", adjustedTimeout, msg);
         }
       }
       return (eval || interrupted) ? (System.currentTimeMillis() - started) : -1;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
index 2a468897f2..4b956ec255 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
@@ -92,7 +92,7 @@ public class TestEncryption {

   private void checkTransformSymmetry(byte[] keyBytes, byte[] iv, byte[] plaintext)
       throws Exception {
-    LOG.info("checkTransformSymmetry: AES, plaintext length = " + plaintext.length);
+    LOG.info("checkTransformSymmetry: AES, plaintext length = {}", plaintext.length);

     Configuration conf = HBaseConfiguration.create();
     String algorithm =
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
index 0b22caaec5..e0dfdb5d0f 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
@@ -27,7 +27,9 @@ import java.security.Key;
 import java.security.KeyStore;
 import java.security.MessageDigest;
 import java.util.Properties;
+
 import javax.crypto.spec.SecretKeySpec;
+
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -37,8 +39,6 @@ import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

 @Category({MiscTests.class, SmallTests.class})
 public class TestKeyStoreKeyProvider {
@@ -47,7 +47,6 @@ public class TestKeyStoreKeyProvider {
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestKeyStoreKeyProvider.class);

-  private static final Logger LOG = LoggerFactory.getLogger(TestKeyStoreKeyProvider.class);
   static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
   static final String ALIAS = "test";
   static final String PASSWORD = "password";
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
index 0705a36571..353f007a7c 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
@@ -87,7 +87,7 @@ public class ClassLoaderTestHelper {
       LOG.info("Adding classes to jar file completed");
       return true;
     } catch (Exception ex) {
-      LOG.error("Error: " + ex.getMessage());
+      LOG.error("Error {}", ex.getMessage());
       return false;
     }
   }
@@ -150,7 +150,7 @@ public class ClassLoaderTestHelper {
         + System.getProperty("surefire.test.class.path");
     options.add(classpath);

-    LOG.debug("Setting classpath to: " + classpath);
+    LOG.debug("Setting classpath to: {}", classpath);

     JavaCompiler.CompilationTask task = compiler.getTask(null, fm, null,
       options, null, cu);
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
index ad49e55371..5619b7d799 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
@@ -61,8 +61,8 @@ public class LoadTestKVGenerator {
     byte[] expectedData = getValueForRowColumn(value.length, seedStrings);
     boolean equals = Bytes.equals(expectedData, value);
     if (!equals && LOG.isDebugEnabled() && logLimit > 0) {
-      LOG.debug("verify failed, expected value: " + Bytes.toStringBinary(expectedData)
-          + " actual value: "+ Bytes.toStringBinary(value));
+      LOG.debug("verify failed, expected value: {} actual value: {}",
+          Bytes.toStringBinary(expectedData), Bytes.toStringBinary(value));
       logLimit--; // this is not thread safe, but at worst we will have more logging
     }
     return equals;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
index b85921588f..44f0a75783 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
@@ -78,7 +78,7 @@ public class TestDynamicClassLoader {
           folder, className, null, ClassLoaderTestHelper.localDirPath(conf));
       classLoader.loadClass(className);
     } catch (ClassNotFoundException cnfe) {
-      LOG.error("Should be able to load class " + className, cnfe);
+      LOG.error("Should be able to load class {}", className, cnfe);
       fail(cnfe.getMessage());
     }
   }
@@ -102,7 +102,7 @@ public class TestDynamicClassLoader {
       ClassLoaderTestHelper.buildJar(folder, className, null);
       classLoader.loadClass(className);
     } catch (ClassNotFoundException cnfe) {
-      LOG.error("Should be able to load class " + className, cnfe);
+      LOG.error("Should be able to load class {}", className, cnfe);
       fail(cnfe.getMessage());
     }
   }
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java
index 1e5392e49d..4e91e91226 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java
@@ -44,7 +44,7 @@ public class TestShowProperties {
   public void testShowProperty() {
     Properties properties = System.getProperties();
     for (java.util.Map.Entry<Object, Object> prop : properties.entrySet()) {
-      LOG.info("Property " + prop.getKey() + "=" + prop.getValue());
+      LOG.info("Property {}={}", prop.getKey(), prop.getValue());
     }
   }
 }
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java
index 4f6da4ebcb..35856c2be8 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java
@@ -48,13 +48,13 @@ public class TestThreads {
     Thread sleeper = new Thread(new Runnable() {
       @Override
       public void run() {
-        LOG.debug("Sleeper thread: sleeping for " + SLEEP_TIME_MS);
+        LOG.debug("Sleeper thread: sleeping for {}", SLEEP_TIME_MS);
         Threads.sleepWithoutInterrupt(SLEEP_TIME_MS);
         LOG.debug("Sleeper thread: finished sleeping");
         wasInterrupted.set(Thread.currentThread().isInterrupted());
       }
     });
-    LOG.debug("Starting sleeper thread (" + SLEEP_TIME_MS + " ms)");
+    LOG.debug("Starting sleeper thread ({} ms)", SLEEP_TIME_MS);
     sleeper.start();
     long startTime = System.currentTimeMillis();
     LOG.debug("Main thread: sleeping for 200 ms");
@@ -79,7 +79,6 @@ public class TestThreads {
     // We expect to wait at least SLEEP_TIME_MS, but we can wait more if there is a GC.
     assertTrue("Elapsed time " + timeElapsed + " ms is out of the expected " +
         " sleep time of " + SLEEP_TIME_MS, SLEEP_TIME_MS - timeElapsed < TOLERANCE_MS);
-    LOG.debug("Target sleep time: " + SLEEP_TIME_MS + ", time elapsed: " +
-        timeElapsed);
+    LOG.debug("Target sleep time: {}, time elapsed: {}", SLEEP_TIME_MS, timeElapsed);
   }
 }
-- 
2.17.0