@@ -78,7 +78,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
public class KeyValue implements ExtendedCell {
private static final ArrayList EMPTY_ARRAY_LIST = new ArrayList<>();
- private static final Log LOG = LogFactory.getLog(KeyValue.class);
+ private static final Logger LOG = LoggerFactory.getLogger(KeyValue.class);
public static final int FIXED_OVERHEAD = ClassSize.OBJECT + // the KeyValue object itself
ClassSize.REFERENCE + // pointer to "bytes"
@@ -735,9 +735,9 @@ public class KeyValue implements ExtendedCell {
}
public KeyValue(Cell c) {
- this(c.getRowArray(), c.getRowOffset(), (int)c.getRowLength(),
- c.getFamilyArray(), c.getFamilyOffset(), (int)c.getFamilyLength(),
- c.getQualifierArray(), c.getQualifierOffset(), (int) c.getQualifierLength(),
+ this(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
+ c.getFamilyArray(), c.getFamilyOffset(), c.getFamilyLength(),
+ c.getQualifierArray(), c.getQualifierOffset(), c.getQualifierLength(),
c.getTimestamp(), Type.codeToType(c.getTypeByte()), c.getValueArray(), c.getValueOffset(),
c.getValueLength(), c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
this.seqId = c.getSequenceId();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
index 927b2b3736..16842da307 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
@@ -21,10 +21,9 @@ package org.apache.hadoop.hbase;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Public
public abstract class ScheduledChore implements Runnable {
- private static final Log LOG = LogFactory.getLog(ScheduledChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ScheduledChore.class);
private final String name;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
index b8816ad017..e1a96bdcf4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
@@ -23,18 +23,19 @@ import java.io.InputStream;
import java.io.PushbackInputStream;
import edu.umd.cs.findbugs.annotations.NonNull;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* TODO javadoc
*/
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
public abstract class BaseDecoder implements Codec.Decoder {
- protected static final Log LOG = LogFactory.getLog(BaseDecoder.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(BaseDecoder.class);
protected final InputStream in;
private Cell current = null;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
index 61cc170dce..0b97abbb7d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
@@ -23,10 +23,10 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An OutputStream which writes data into ByteBuffers. It will try to get ByteBuffer, as and when
@@ -37,7 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class ByteBufferListOutputStream extends ByteBufferOutputStream {
- private static final Log LOG = LogFactory.getLog(ByteBufferListOutputStream.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ByteBufferListOutputStream.class);
private ByteBufferPool pool;
// Keep track of the BBs where bytes written to. We will first try to get a BB from the pool. If
@@ -115,7 +115,7 @@ public class ByteBufferListOutputStream extends ByteBufferOutputStream {
try {
close();
} catch (IOException e) {
- LOG.debug(e);
+ LOG.debug(e.toString(), e);
}
// Return back all the BBs to pool
if (this.bufsFromPool != null) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
index 784c88fcf6..9c51f1bf4a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
@@ -22,11 +22,10 @@ import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
-
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Like Hadoops' ByteBufferPool only you do not specify desired size when getting a ByteBuffer. This
@@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class ByteBufferPool {
- private static final Log LOG = LogFactory.getLog(ByteBufferPool.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ByteBufferPool.class);
// TODO better config names?
// hbase.ipc.server.reservoir.initial.max -> hbase.ipc.server.reservoir.max.buffer.count
// hbase.ipc.server.reservoir.initial.buffer.size -> hbase.ipc.server.reservoir.buffer.size
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
index ec4ce38d3a..d258ba2927 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
@@ -23,8 +23,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
@@ -39,6 +37,8 @@ import org.apache.hadoop.io.compress.DoNotPool;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Compression related stuff.
@@ -46,7 +46,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public final class Compression {
- private static final Log LOG = LogFactory.getLog(Compression.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Compression.class);
/**
* Prevent the instantiation of class.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java
index 3db9d7ec44..05ea39b72a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java
@@ -22,14 +22,14 @@ import java.io.OutputStream;
import java.util.Arrays;
import java.util.zip.GZIPOutputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.util.JVM;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.CompressorStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Fixes an inefficiency in Hadoop's Gzip codec, allowing to reuse compression
@@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class ReusableStreamGzipCodec extends GzipCodec {
- private static final Log LOG = LogFactory.getLog(Compression.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Compression.class);
/**
* A bridge that wraps around a DeflaterOutputStream to make it a
@@ -70,7 +70,7 @@ public class ReusableStreamGzipCodec extends GzipCodec {
try {
gzipStream.close();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
}
@@ -98,7 +98,7 @@ public class ReusableStreamGzipCodec extends GzipCodec {
*/
@Override
public void finish() throws IOException {
- if (HAS_BROKEN_FINISH) {
+ if (HAS_BROKEN_FINISH) {
if (!def.finished()) {
def.finish();
while (!def.finished()) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
index 49cc61f231..af0089d02c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
@@ -27,13 +27,12 @@ import java.security.spec.InvalidKeySpecException;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
+
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@@ -41,6 +40,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A facade for encryption algorithms and related support.
@@ -48,7 +49,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Public
public final class Encryption {
- private static final Log LOG = LogFactory.getLog(Encryption.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Encryption.class);
/**
* Crypto context
@@ -204,7 +205,7 @@ public final class Encryption {
/**
* Return a 128 bit key derived from the concatenation of the supplied
* arguments using PBKDF2WithHmacSHA1 at 10,000 iterations.
- *
+ *
*/
public static byte[] pbkdf128(String... args) {
byte[] salt = new byte[128];
@@ -227,7 +228,7 @@ public final class Encryption {
/**
* Return a 128 bit key derived from the concatenation of the supplied
* arguments using PBKDF2WithHmacSHA1 at 10,000 iterations.
- *
+ *
*/
public static byte[] pbkdf128(byte[]... args) {
byte[] salt = new byte[128];
@@ -420,7 +421,7 @@ public final class Encryption {
*/
public static Key getSecretKeyForSubject(String subject, Configuration conf)
throws IOException {
- KeyProvider provider = (KeyProvider)getKeyProvider(conf);
+ KeyProvider provider = getKeyProvider(conf);
if (provider != null) try {
Key[] keys = provider.getKeys(new String[] { subject });
if (keys != null && keys.length > 0) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java
index 6c73bb4970..0ca1183662 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java
@@ -23,20 +23,20 @@ import java.io.OutputStream;
import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.SecureRandom;
+
import javax.crypto.spec.SecretKeySpec;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.CipherProvider;
import org.apache.hadoop.hbase.io.crypto.Context;
import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryptor;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* AES-128, provided by the JCE
@@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceStability.Evolving
public class AES extends Cipher {
- private static final Log LOG = LogFactory.getLog(AES.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AES.class);
public static final String CIPHER_MODE_KEY = "hbase.crypto.algorithm.aes.mode";
public static final String CIPHER_PROVIDER_KEY = "hbase.crypto.algorithm.aes.provider";
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java
index 412ea7947d..9a1f71dd5d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java
@@ -24,28 +24,28 @@ import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.SecureRandom;
import java.util.Properties;
+
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.crypto.cipher.CryptoCipherFactory;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.CipherProvider;
import org.apache.hadoop.hbase.io.crypto.Context;
import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryptor;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class CommonsCryptoAES extends Cipher {
- private static final Log LOG = LogFactory.getLog(CommonsCryptoAES.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CommonsCryptoAES.class);
public static final String CIPHER_MODE_KEY = "hbase.crypto.commons.mode";
public static final String CIPHER_CLASSES_KEY = "hbase.crypto.commons.cipher.classes";
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java
index 92a3a4f089..7dbbdba980 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java
@@ -13,16 +13,16 @@ package org.apache.hadoop.hbase.io.encoding;
import java.io.DataOutputStream;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class RowIndexEncoderV1 {
- private static final Log LOG = LogFactory.getLog(RowIndexEncoderV1.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RowIndexEncoderV1.class);
/** The Cell previously appended. */
private Cell lastCell = null;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
index dceafbd087..c52c764c1e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
@@ -23,11 +23,11 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Keeps lists of superusers and super groups loaded from HBase configuration,
@@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public final class Superusers {
- private static final Log LOG = LogFactory.getLog(Superusers.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Superusers.class);
/** Configuration key for superusers */
public static final String SUPERUSER_CONF_KEY = "hbase.superuser"; // Not getting a name
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
index b31a4f6d73..03d03d9fe4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
@@ -18,15 +18,15 @@
package org.apache.hadoop.hbase.trace;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.htrace.core.HTraceConfiguration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class HBaseHTraceConfiguration extends HTraceConfiguration {
- private static final Log LOG = LogFactory.getLog(HBaseHTraceConfiguration.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseHTraceConfiguration.class);
public static final String KEY_PREFIX = "hbase.htrace.";
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
index 89339c536f..14ef945d75 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
@@ -21,11 +21,11 @@ import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.htrace.core.SpanReceiver;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class provides functions for reading the names of SpanReceivers from
@@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class SpanReceiverHost {
public static final String SPAN_RECEIVERS_CONF_KEY = "hbase.trace.spanreceiver.classes";
- private static final Log LOG = LogFactory.getLog(SpanReceiverHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SpanReceiverHost.class);
private Collection receivers;
private Configuration conf;
private boolean closed = false;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
index 21b174e71b..9414e31532 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
@@ -31,14 +31,14 @@ import org.apache.commons.cli.MissingOptionException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Common base class used for HBase command-line tools. Simplifies workflow and
@@ -55,7 +55,7 @@ public abstract class AbstractHBaseTool implements Tool, Configurable {
private static final Option HELP_OPTION = new Option("h", "help", false,
"Prints help for this tool.");
- private static final Log LOG = LogFactory.getLog(AbstractHBaseTool.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractHBaseTool.class);
protected final Options options = new Options();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
index 91df2d56ac..56784ed554 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
@@ -39,9 +39,9 @@ import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Encodes and decodes to and from Base64 notation.
@@ -158,7 +158,7 @@ public class Base64 {
/* ******** P R I V A T E F I E L D S ******** */
- private static final Log LOG = LogFactory.getLog(Base64.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Base64.class);
/** Maximum line length (76) of Base64 output. */
private final static int MAX_LINE_LENGTH = 76;
@@ -1100,7 +1100,7 @@ public class Base64 {
// Check the size of file
if (file.length() > Integer.MAX_VALUE) {
- LOG.fatal("File is too big for this convenience method (" +
+ LOG.error("File is too big for this convenience method (" +
file.length() + " bytes).");
return null;
} // end if: file too big for int index
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
index b2e5c9b751..6782de6bd0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
@@ -28,14 +28,15 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.nio.MultiByteBuff;
import org.apache.hadoop.hbase.nio.SingleByteBuff;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
/**
* This class manages an array of ByteBuffers with a default size 4MB. These
@@ -44,7 +45,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class ByteBufferArray {
- private static final Log LOG = LogFactory.getLog(ByteBufferArray.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ByteBufferArray.class);
public static final int DEFAULT_BUFFER_SIZE = 4 * 1024 * 1024;
@VisibleForTesting
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index c32649b067..7ed60e6663 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -36,20 +36,21 @@ import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
-import com.google.protobuf.ByteString;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;
import org.apache.yetus.audience.InterfaceAudience;
-import sun.misc.Unsafe;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import com.google.protobuf.ByteString;
+
+import sun.misc.Unsafe;
/**
* Utility class that handles byte arrays, conversions to/from other types,
@@ -70,7 +71,7 @@ public class Bytes implements Comparable {
//HConstants.EMPTY_BYTE_ARRAY should be updated if this changed
private static final byte [] EMPTY_BYTE_ARRAY = new byte [0];
- private static final Log LOG = LogFactory.getLog(Bytes.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Bytes.class);
/**
* Size of boolean in bytes
@@ -1533,8 +1534,8 @@ public class Bytes implements Comparable {
final int stride = 8;
final int minLength = Math.min(length1, length2);
int strideLimit = minLength & ~(stride - 1);
- final long offset1Adj = (long) offset1 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
- final long offset2Adj = (long) offset2 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
+ final long offset1Adj = offset1 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
+ final long offset2Adj = offset2 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
int i;
/*
@@ -1542,8 +1543,8 @@ public class Bytes implements Comparable {
* than 4 bytes even on 32-bit. On the other hand, it is substantially faster on 64-bit.
*/
for (i = 0; i < strideLimit; i += stride) {
- long lw = theUnsafe.getLong(buffer1, offset1Adj + (long) i);
- long rw = theUnsafe.getLong(buffer2, offset2Adj + (long) i);
+ long lw = theUnsafe.getLong(buffer1, offset1Adj + i);
+ long rw = theUnsafe.getLong(buffer2, offset2Adj + i);
if (lw != rw) {
if(!UnsafeAccess.littleEndian) {
return ((lw + Long.MIN_VALUE) < (rw + Long.MIN_VALUE)) ? -1 : 1;
@@ -1936,7 +1937,7 @@ public class Bytes implements Comparable {
public static int hashCode(byte[] bytes, int offset, int length) {
int hash = 1;
for (int i = offset; i < offset + length; i++)
- hash = (31 * hash) + (int) bytes[i];
+ hash = (31 * hash) + bytes[i];
return hash;
}
@@ -2517,7 +2518,7 @@ public class Bytes implements Comparable {
}
return new String(ch);
}
-
+
/**
* Convert a byte array into a hex string
*/
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index efcf8d0bec..9a7f0ef70e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -25,10 +25,9 @@ import java.lang.reflect.Modifier;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
@@ -40,7 +39,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class ClassSize {
- private static final Log LOG = LogFactory.getLog(ClassSize.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClassSize.class);
/** Array overhead */
public static final int ARRAY;
@@ -197,7 +196,7 @@ public class ClassSize {
return (int) UnsafeAccess.theUnsafe.objectFieldOffset(
HeaderSize.class.getDeclaredField("a"));
} catch (NoSuchFieldException | SecurityException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
return super.headerSize();
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
index eba3b12abc..9810743e73 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
@@ -29,8 +29,6 @@ import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -43,18 +41,19 @@ import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.ipc.RemoteException;
-import org.apache.yetus.audience.InterfaceAudience;
-
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility methods for interacting with the underlying file system.
*/
@InterfaceAudience.Private
public abstract class CommonFSUtils {
- private static final Log LOG = LogFactory.getLog(CommonFSUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CommonFSUtils.class);
/** Parameter name for HBase WAL directory */
public static final String HBASE_WAL_DIR = "hbase.wal.dir";
@@ -731,7 +730,7 @@ public abstract class CommonFSUtils {
* @param LOG log to output information
* @throws IOException if an unexpected exception occurs
*/
- public static void logFileSystemState(final FileSystem fs, final Path root, Log LOG)
+ public static void logFileSystemState(final FileSystem fs, final Path root, Logger LOG)
throws IOException {
LOG.debug("File system contents for path " + root);
logFSTree(LOG, fs, root, "|-");
@@ -740,9 +739,9 @@ public abstract class CommonFSUtils {
/**
* Recursive helper to log the state of the FS
*
- * @see #logFileSystemState(FileSystem, Path, Log)
+ * @see #logFileSystemState(FileSystem, Path, Logger)
*/
- private static void logFSTree(Log LOG, final FileSystem fs, final Path root, String prefix)
+ private static void logFSTree(Logger LOG, final FileSystem fs, final Path root, String prefix)
throws IOException {
FileStatus[] files = listStatus(fs, root, null);
if (files == null) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
index 15828ed8ee..cf101e043a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
@@ -34,18 +34,17 @@ import java.util.jar.JarFile;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.yetus.audience.InterfaceAudience;
-
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* ClassLoader used to load classes for Coprocessor instances.
@@ -76,7 +75,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
*/
@InterfaceAudience.Private
public class CoprocessorClassLoader extends ClassLoaderBase {
- private static final Log LOG = LogFactory.getLog(CoprocessorClassLoader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CoprocessorClassLoader.class);
// A temporary place ${hbase.local.dir}/jars/tmp/ to store the local
// copy of the jar file and the libraries contained in the jar.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
index 75dcf5f0d2..28fce21b1c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
@@ -23,13 +23,13 @@ import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This is a class loader that can load classes dynamically from new
@@ -57,8 +57,8 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class DynamicClassLoader extends ClassLoaderBase {
- private static final Log LOG =
- LogFactory.getLog(DynamicClassLoader.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(DynamicClassLoader.class);
// Dynamic jars are put under ${hbase.local.dir}/jars/
private static final String DYNAMIC_JARS_DIR = File.separator
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
index 532f8419b4..ab95b31263 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
@@ -25,6 +25,7 @@ import java.lang.reflect.Array;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.Set;
+
import javax.management.AttributeNotFoundException;
import javax.management.InstanceNotFoundException;
import javax.management.IntrospectionException;
@@ -41,17 +42,18 @@ import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.TabularData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonGenerator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
/**
* Utility for doing JSON and MBeans.
*/
public class JSONBean {
- private static final Log LOG = LogFactory.getLog(JSONBean.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JSONBean.class);
private final JsonFactory jsonFactory;
public JSONBean() {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
index ae967a16e4..b6c05b67c9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
@@ -28,6 +28,7 @@ import java.lang.management.RuntimeMXBean;
import java.util.Hashtable;
import java.util.List;
import java.util.Set;
+
import javax.management.InstanceNotFoundException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanInfo;
@@ -37,15 +38,16 @@ import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.management.openmbean.CompositeData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
public final class JSONMetricUtil {
- private static final Log LOG = LogFactory.getLog(JSONMetricUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JSONMetricUtil.class);
private static MBeanServer mbServer = ManagementFactory.getPlatformMBeanServer();
//MBeans ObjectName domain names
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
index ab966f17b4..6657481ed0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
@@ -28,9 +28,9 @@ import java.lang.management.RuntimeMXBean;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -42,7 +42,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class JVM {
- private static final Log LOG = LogFactory.getLog(JVM.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JVM.class);
private OperatingSystemMXBean osMbean;
private static final boolean ibmvendor =
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java
index 804c1cdf45..c38f1a9f8b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java
@@ -23,9 +23,9 @@ import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility class for MD5
@@ -33,7 +33,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Public
public class MD5Hash {
- private static final Log LOG = LogFactory.getLog(MD5Hash.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MD5Hash.class);
/**
* Given a byte array, returns in MD5 hash as a hex string.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java
index 296dc643d2..7034890960 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java
@@ -24,13 +24,13 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.UndeclaredThrowableException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class Methods {
- private static final Log LOG = LogFactory.getLog(Methods.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Methods.class);
public static <T> Object call(Class<T> clazz, T instance, String methodName,
Class[] types, Object[] args) throws Exception {
@@ -38,7 +38,7 @@ public class Methods {
Method m = clazz.getMethod(methodName, types);
return m.invoke(instance, args);
} catch (IllegalArgumentException arge) {
- LOG.fatal("Constructed invalid call. class="+clazz.getName()+
+ LOG.error("Constructed invalid call. class="+clazz.getName()+
" method=" + methodName + " types=" + Classes.stringify(types), arge);
throw arge;
} catch (NoSuchMethodException nsme) {
@@ -59,7 +59,7 @@ public class Methods {
throw new IllegalArgumentException(
"Denied access calling "+clazz.getName()+"."+methodName+"()", iae);
} catch (SecurityException se) {
- LOG.fatal("SecurityException calling method. class="+clazz.getName()+
+ LOG.error("SecurityException calling method. class="+clazz.getName()+
" method=" + methodName + " types=" + Classes.stringify(types), se);
throw se;
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
index 4f4b775af0..147e9160f9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
@@ -22,16 +22,16 @@ package org.apache.hadoop.hbase.util;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.exceptions.HBaseException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class PrettyPrinter {
- private static final Log LOG = LogFactory.getLog(PrettyPrinter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PrettyPrinter.class);
private static final String INTERVAL_REGEX = "((\\d+)\\s*SECONDS?\\s*\\()?\\s*" +
"((\\d+)\\s*DAYS?)?\\s*((\\d+)\\s*HOURS?)?\\s*" +
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
index 6430d2eb12..a136846a92 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
@@ -29,9 +29,10 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.charset.Charset;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import org.apache.commons.logging.Log;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+
+import edu.umd.cs.findbugs.annotations.NonNull;
@InterfaceAudience.Private
public class ReflectionUtils {
@@ -111,7 +112,7 @@ public class ReflectionUtils {
* @param title a descriptive title for the call stacks
* @param minInterval the minimum time from the last
*/
- public static void logThreadInfo(Log log,
+ public static void logThreadInfo(Logger log,
String title,
long minInterval) {
boolean dumpStack = false;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java
index 116b6cce7d..ffcd9cab33 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.util;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class RetryCounter {
@@ -127,7 +127,7 @@ public class RetryCounter {
}
}
- private static final Log LOG = LogFactory.getLog(RetryCounter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RetryCounter.class);
private RetryConfig retryConfig;
private int attempts;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
index 326da2e69c..7d4d692e1a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
@@ -18,10 +18,10 @@
*/
package org.apache.hadoop.hbase.util;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Sleeper for current thread.
@@ -31,7 +31,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class Sleeper {
- private static final Log LOG = LogFactory.getLog(Sleeper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Sleeper.class);
private final int period;
private final Stoppable stopper;
private static final long MINIMAL_DELTA_FOR_LOGGING = 10000;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
index 4e2f09f611..6acea35b2b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
@@ -31,20 +31,19 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Thread Utility
*/
@InterfaceAudience.Private
public class Threads {
- private static final Log LOG = LogFactory.getLog(Threads.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Threads.class);
private static final AtomicInteger poolNumber = new AtomicInteger(1);
public static final UncaughtExceptionHandler LOGGING_EXCEPTION_HANDLER =
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
index 50fef6d823..feaa9e6624 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
@@ -23,10 +23,11 @@ import java.nio.ByteOrder;
import java.security.AccessController;
import java.security.PrivilegedAction;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import sun.misc.Unsafe;
import sun.nio.ch.DirectBuffer;
@@ -34,7 +35,7 @@ import sun.nio.ch.DirectBuffer;
@InterfaceStability.Evolving
public final class UnsafeAccess {
- private static final Log LOG = LogFactory.getLog(UnsafeAccess.class);
+ private static final Logger LOG = LoggerFactory.getLogger(UnsafeAccess.class);
static final Unsafe theUnsafe;
@@ -325,7 +326,7 @@ public final class UnsafeAccess {
destAddress = destAddress + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
destBase = dest.array();
}
- long srcAddress = (long) srcOffset + BYTE_ARRAY_BASE_OFFSET;
+ long srcAddress = srcOffset + BYTE_ARRAY_BASE_OFFSET;
unsafeCopy(src, srcAddress, destBase, destAddress, length);
}
@@ -359,7 +360,7 @@ public final class UnsafeAccess {
srcAddress = srcAddress + BYTE_ARRAY_BASE_OFFSET + src.arrayOffset();
srcBase = src.array();
}
- long destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET;
+ long destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET;
unsafeCopy(srcBase, srcAddress, dest, destAddress, length);
}
@@ -386,7 +387,7 @@ public final class UnsafeAccess {
if (dest.isDirect()) {
destAddress = destOffset + ((DirectBuffer) dest).address();
} else {
- destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
+ destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
destBase = dest.array();
}
unsafeCopy(srcBase, srcAddress, destBase, destAddress, length);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java
index 8fe70443ae..88dd524296 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java
@@ -22,15 +22,15 @@ import java.lang.reflect.Method;
import java.security.AccessController;
import java.security.PrivilegedAction;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class UnsafeAvailChecker {
private static final String CLASS_NAME = "sun.misc.Unsafe";
- private static final Log LOG = LogFactory.getLog(UnsafeAvailChecker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(UnsafeAvailChecker.class);
private static boolean avail = false;
private static boolean unaligned = false;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
index 07b9c5880a..9d9b563062 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
@@ -21,17 +21,17 @@ package org.apache.hadoop.hbase.util;
import java.io.PrintStream;
import java.io.PrintWriter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Version;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class finds the Version information for HBase.
*/
@InterfaceAudience.Public
public class VersionInfo {
- private static final Log LOG = LogFactory.getLog(VersionInfo.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(VersionInfo.class.getName());
// If between two dots there is not a number, we regard it as a very large number so it is
// higher than any numbers in the version.
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
index db203b80b7..4aa81d8c13 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
@@ -33,8 +33,8 @@ import java.util.jar.JarInputStream;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A class that finds a set of classes that are locally accessible
@@ -42,7 +42,7 @@ import org.apache.commons.logging.LogFactory;
* imposed by name and class filters provided by the user.
*/
public class ClassFinder {
- private static final Log LOG = LogFactory.getLog(ClassFinder.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClassFinder.class);
private static String CLASS_EXT = ".class";
private ResourcePathFilter resourcePathFilter;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index d84e8ec5c4..693f9b2f74 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -25,13 +25,13 @@ import java.util.List;
import java.util.UUID;
import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Waiter.Predicate;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Common helpers for testing HBase that do not depend on specific server/etc. things.
@@ -39,7 +39,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Public
public class HBaseCommonTestingUtility {
- protected static final Log LOG = LogFactory.getLog(HBaseCommonTestingUtility.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(HBaseCommonTestingUtility.class);
/** Compression algorithms to use in parameterized JUnit 4 tests */
public static final List<Object[]> COMPRESSION_ALGORITHMS_PARAMETERIZED =
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.curator</groupId>
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
index 4535cab129..7b071f412c 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
@@ -38,13 +38,13 @@ import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
@@ -83,7 +83,7 @@ import org.apache.hadoop.hbase.util.Pair;
@InterfaceAudience.Public
public class AggregationClient implements Closeable {
// TODO: This class is not used. Move to examples?
- private static final Log log = LogFactory.getLog(AggregationClient.class);
+ private static final Logger log = LoggerFactory.getLogger(AggregationClient.class);
private final Connection connection;
/**
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index 6e0e6d447a..6beb3f66fd 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -33,11 +33,11 @@ import java.util.Collections;
import java.util.List;
import java.util.NavigableSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
@@ -62,7 +62,7 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
@InterfaceAudience.Private
public class AggregateImplementation<T, S, P extends Message, Q extends Message, R extends Message>
extends AggregateService implements RegionCoprocessor {
- protected static final Log log = LogFactory.getLog(AggregateImplementation.class);
+ protected static final Logger log = LoggerFactory.getLogger(AggregateImplementation.class);
private RegionCoprocessorEnvironment env;
/**
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
index 66e9e044f6..f642d610b7 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
@@ -29,8 +29,6 @@ import java.util.Map;
import java.util.TreeMap;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -75,7 +73,8 @@ import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.RpcCallback;
@@ -93,7 +92,7 @@ import com.google.protobuf.Service;
@InterfaceStability.Evolving
public class Export extends ExportProtos.ExportService implements RegionCoprocessor {
- private static final Log LOG = LogFactory.getLog(Export.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Export.class);
private static final Class<? extends CompressionCodec> DEFAULT_CODEC = DefaultCodec.class;
private static final SequenceFile.CompressionType DEFAULT_TYPE = SequenceFile.CompressionType.RECORD;
private RegionCoprocessorEnvironment env = null;
@@ -341,7 +340,7 @@ public class Export extends ExportProtos.ExportService implements RegionCoproces
done.run(response);
} catch (IOException e) {
CoprocessorRpcUtils.setControllerException(controller, e);
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
index 69d8491f9a..18c932e22a 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
@@ -23,8 +23,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
@@ -51,6 +49,8 @@ import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Coprocessor service for bulk loads in secure mode.
@@ -63,7 +63,7 @@ public class SecureBulkLoadEndpoint extends SecureBulkLoadService implements Reg
public static final long VERSION = 0L;
- private static final Log LOG = LogFactory.getLog(SecureBulkLoadEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SecureBulkLoadEndpoint.class);
private RegionCoprocessorEnvironment env;
private RegionServerServices rsServices;
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
index cfcb565cc3..fd570e7854 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationP
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -45,7 +45,7 @@ import com.google.protobuf.Service;
*/
public class ColumnAggregationEndpoint extends ColumnAggregationService
implements RegionCoprocessor {
- private static final Log LOG = LogFactory.getLog(ColumnAggregationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ColumnAggregationEndpoint.class);
private RegionCoprocessorEnvironment env = null;
@Override
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
index 80316d3984..c8de7d0168 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationW
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -48,7 +48,7 @@ import com.google.protobuf.Service;
*/
public class ColumnAggregationEndpointNullResponse
extends ColumnAggregationServiceNullResponse implements RegionCoprocessor {
- private static final Log LOG = LogFactory.getLog(ColumnAggregationEndpointNullResponse.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ColumnAggregationEndpointNullResponse.class);
private RegionCoprocessorEnvironment env = null;
@Override
public Iterable<Service> getServices() {
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
index 49b79ce51f..29e4658489 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -37,6 +35,8 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -50,7 +50,7 @@ import com.google.protobuf.Service;
public class ColumnAggregationEndpointWithErrors
extends ColumnAggregationWithErrorsProtos.ColumnAggregationServiceWithErrors
implements RegionCoprocessor {
- private static final Log LOG = LogFactory.getLog(ColumnAggregationEndpointWithErrors.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ColumnAggregationEndpointWithErrors.class);
private RegionCoprocessorEnvironment env = null;
@Override
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
index eecf7a3f0c..5433792986 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
@@ -25,8 +25,6 @@ import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -51,6 +49,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.ByteString;
import com.google.protobuf.ServiceException;
@@ -60,7 +60,7 @@ import com.google.protobuf.ServiceException;
*/
@Category({CoprocessorTests.class, MediumTests.class})
public class TestBatchCoprocessorEndpoint {
- private static final Log LOG = LogFactory.getLog(TestBatchCoprocessorEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestBatchCoprocessorEndpoint.class);
private static final TableName TEST_TABLE =
TableName.valueOf("TestTable");
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
index e52e032823..37e5a78cb1 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.coprocessor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -48,6 +46,8 @@ import java.util.*;
import org.junit.*;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@@ -59,7 +59,7 @@ import static org.junit.Assert.assertFalse;
*/
@Category({CoprocessorTests.class, MediumTests.class})
public class TestClassLoading {
- private static final Log LOG = LogFactory.getLog(TestClassLoading.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestClassLoading.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
public static class TestMasterCoprocessor implements MasterCoprocessor, MasterObserver {
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
index adfd8d53e5..376c071586 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
@@ -30,8 +30,6 @@ import java.util.List;
import java.util.Map;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -59,6 +57,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
@@ -68,7 +68,7 @@ import com.google.protobuf.ServiceException;
*/
@Category({CoprocessorTests.class, MediumTests.class})
public class TestCoprocessorEndpoint {
- private static final Log LOG = LogFactory.getLog(TestCoprocessorEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorEndpoint.class);
private static final TableName TEST_TABLE =
TableName.valueOf("TestCoprocessorEndpoint");
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
index 8c11192677..8a79400bcb 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
@@ -74,10 +74,10 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.Message;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
/**
* Verifies ProcessEndpoint works.
@@ -86,7 +86,7 @@ import org.apache.commons.logging.LogFactory;
@Category({CoprocessorTests.class, MediumTests.class})
public class TestRowProcessorEndpoint {
- private static final Log LOG = LogFactory.getLog(TestRowProcessorEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRowProcessorEndpoint.class);
private static final TableName TABLE = TableName.valueOf("testtable");
private final static byte[] ROW = Bytes.toBytes("testrow");
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
index e4cd54d581..76ef82504e 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
@@ -27,8 +27,7 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -82,10 +81,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MediumTests.class})
public class TestSecureExport {
- private static final Log LOG = LogFactory.getLog(TestSecureExport.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSecureExport.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static MiniKdc KDC;
private static final File KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath());
@@ -311,7 +312,7 @@ public class TestSecureExport {
} catch (ServiceException | IOException ex) {
throw ex;
} catch (Throwable ex) {
- LOG.error(ex);
+ LOG.error(ex.toString(), ex);
throw new Exception(ex);
} finally {
clearOutput(output);
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
index 25953bc2a9..2daacde5a0 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
@@ -49,7 +47,8 @@ import org.junit.Ignore;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -64,8 +63,8 @@ public class TestHRegionServerBulkLoadWithOldSecureEndpoint extends TestHRegionS
super(duration);
}
- private static final Log LOG =
- LogFactory.getLog(TestHRegionServerBulkLoadWithOldSecureEndpoint.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestHRegionServerBulkLoadWithOldSecureEndpoint.class);
@BeforeClass
public static void setUpBeforeClass() throws IOException {
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
index bad01f3eae..0b17abf0dc 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
@@ -26,8 +26,6 @@ import java.io.IOException;
import java.util.Collections;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionLocation;
@@ -61,6 +59,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -69,7 +69,7 @@ import com.google.protobuf.ServiceException;
@Category({RegionServerTests.class, MediumTests.class})
public class TestServerCustomProtocol {
- private static final Log LOG = LogFactory.getLog(TestServerCustomProtocol.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestServerCustomProtocol.class);
static final String WHOAREYOU = "Who are you?";
static final String NOBODY = "nobody";
static final String HELLO = "Hello, ";
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
index 0b33d20e7d..1b74b7d724 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
@@ -21,8 +21,6 @@ import java.util.List;
import java.util.Set;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
@@ -37,12 +35,14 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.HFileTestUtil;
import org.junit.BeforeClass;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ ReplicationTests.class, LargeTests.class })
public class TestReplicationSyncUpToolWithBulkLoadedData extends TestReplicationSyncUpTool {
- private static final Log LOG = LogFactory
- .getLog(TestReplicationSyncUpToolWithBulkLoadedData.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TestReplicationSyncUpToolWithBulkLoadedData.class);
@BeforeClass
public static void setUpBeforeClass() throws Exception {
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index 2b94633129..e86d4f1850 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -158,8 +158,8 @@
commons-io
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java
index 67aba62fc6..63d00fb27e 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java
@@ -25,8 +25,6 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.IntStream;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
@@ -38,13 +36,15 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A simple example shows how to use asynchronous client.
*/
public class AsyncClientExample extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(AsyncClientExample.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncClientExample.class);
/**
* The size for thread pool.
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java
index 7b11684b4e..8e8a828870 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java
@@ -18,8 +18,17 @@
*/
package org.apache.hadoop.hbase.client.example;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
@@ -31,24 +40,15 @@ import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An example of using the {@link BufferedMutator} interface.
*/
public class BufferedMutatorExample extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(BufferedMutatorExample.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BufferedMutatorExample.class);
private static final int POOL_SIZE = 10;
private static final int TASK_COUNT = 100;
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
index 5d95fde511..1285f83fd9 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.client.example;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
@@ -90,7 +90,7 @@ import java.util.concurrent.TimeUnit;
*
*/
public class MultiThreadedClientExample extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(MultiThreadedClientExample.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedClientExample.class);
private static final int DEFAULT_NUM_OPERATIONS = 500000;
/**
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
index 0401959b68..a829b2ab1b 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.client.example;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
@@ -40,7 +40,7 @@ import java.io.IOException;
* Region Server side via the RefreshHFilesService.
*/
public class RefreshHFilesClient implements Closeable {
- private static final Log LOG = LogFactory.getLog(RefreshHFilesClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RefreshHFilesClient.class);
private final Connection connection;
/**
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
index 233ea18ead..4735b3db72 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
@@ -25,8 +25,6 @@ import java.util.List;
import java.util.Set;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -50,6 +48,8 @@ import org.apache.hadoop.hbase.regionserver.OperationStatus;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -97,7 +97,7 @@ import com.google.protobuf.Service;
*/
public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCoprocessor {
private static final String NO_OF_VERSIONS_TO_DELETE = "noOfVersionsToDelete";
- private static final Log LOG = LogFactory.getLog(BulkDeleteEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BulkDeleteEndpoint.class);
private RegionCoprocessorEnvironment env;
@@ -167,7 +167,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCopro
}
}
} catch (IOException ioe) {
- LOG.error(ioe);
+ LOG.error(ioe.toString(), ioe);
// Call ServerRpcController#getFailedOn() to retrieve this IOException at client side.
CoprocessorRpcUtils.setControllerException(controller, ioe);
} finally {
@@ -175,7 +175,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCopro
try {
scanner.close();
} catch (IOException ioe) {
- LOG.error(ioe);
+ LOG.error(ioe.toString(), ioe);
}
}
}
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
index c27672cf9d..e916cb38f6 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.coprocessor.example;
import java.io.IOException;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.metrics.Counter;
import org.apache.hadoop.hbase.metrics.Gauge;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.metrics.Timer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An example coprocessor that collects some metrics to demonstrate the usage of exporting custom
@@ -53,7 +53,7 @@ public class ExampleMasterObserverWithMetrics implements MasterCoprocessor, Mast
return Optional.of(this);
}
- private static final Log LOG = LogFactory.getLog(ExampleMasterObserverWithMetrics.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExampleMasterObserverWithMetrics.class);
/** This is the Timer metric object to keep track of the current count across invocations */
private Timer createTableTimer;
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
index 71d40d43ec..60cb1542fd 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
@@ -21,8 +21,7 @@ package org.apache.hadoop.hbase.coprocessor.example;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
@@ -30,6 +29,8 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos;
import org.apache.hadoop.hbase.regionserver.Store;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Collections;
@@ -44,7 +45,7 @@ import java.util.Collections;
*/
public class RefreshHFilesEndpoint extends RefreshHFilesProtos.RefreshHFilesService
implements RegionCoprocessor {
- protected static final Log LOG = LogFactory.getLog(RefreshHFilesEndpoint.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(RefreshHFilesEndpoint.class);
private RegionCoprocessorEnvironment env;
public RefreshHFilesEndpoint() {
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
index 46336d53b7..77f98999ea 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
@@ -25,8 +25,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -55,10 +53,12 @@ import org.apache.hadoop.hbase.wal.WAL;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(MediumTests.class)
public class TestRefreshHFilesEndpoint {
- private static final Log LOG = LogFactory.getLog(TestRefreshHFilesEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRefreshHFilesEndpoint.class);
private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
private static final int NUM_MASTER = 1;
private static final int NUM_RS = 2;
@@ -128,7 +128,7 @@ public class TestRefreshHFilesEndpoint {
if (rex.getCause() instanceof IOException)
throw new IOException();
} catch (Throwable ex) {
- LOG.error(ex);
+ LOG.error(ex.toString(), ex);
fail("Couldn't call the RefreshRegionHFilesEndpoint");
}
}
diff --git a/hbase-external-blockcache/pom.xml b/hbase-external-blockcache/pom.xml
index 26e4aea188..46e58c2ee5 100644
--- a/hbase-external-blockcache/pom.xml
+++ b/hbase-external-blockcache/pom.xml
@@ -155,8 +155,8 @@
true
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.htrace</groupId>
diff --git a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
index b12ac1dd63..a523663381 100644
--- a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
+++ b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
@@ -29,8 +29,6 @@ import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.ExecutionException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.hadoop.hbase.util.Addressing;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import net.spy.memcached.CachedData;
import net.spy.memcached.ConnectionFactoryBuilder;
@@ -55,7 +55,7 @@ import net.spy.memcached.transcoders.Transcoder;
*/
@InterfaceAudience.Private
public class MemcachedBlockCache implements BlockCache {
- private static final Log LOG = LogFactory.getLog(MemcachedBlockCache.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(MemcachedBlockCache.class.getName());
// Some memcache versions won't take more than 1024 * 1024. So set the limit below
// that just in case this client is used with those versions.
diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml
index 47ff69bc83..08ac22dd87 100644
--- a/hbase-hadoop-compat/pom.xml
+++ b/hbase-hadoop-compat/pom.xml
@@ -88,8 +88,8 @@
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java
index 83053164e5..5c1f1035f5 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java
@@ -18,18 +18,18 @@
package org.apache.hadoop.hbase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import java.util.Iterator;
import java.util.ServiceLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
/**
* Class that will create many instances of classes provided by the hbase-hadoop{1|2}-compat jars.
*/
public class CompatibilityFactory {
- private static final Log LOG = LogFactory.getLog(CompatibilitySingletonFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompatibilitySingletonFactory.class);
public static final String EXCEPTION_START = "Could not create ";
public static final String EXCEPTION_END = " Is the hadoop compatibility jar on the classpath?";
@@ -54,7 +54,7 @@ public class CompatibilityFactory {
msg.append(it.next()).append(" ");
}
msg.append("}");
- LOG.warn(msg);
+ LOG.warn(msg.toString());
}
} catch (Exception e) {
throw new RuntimeException(createExceptionString(klass), e);
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
index be6d6d1809..3dc3f49655 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
@@ -18,14 +18,14 @@
package org.apache.hadoop.hbase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.ServiceLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
/**
* Factory for classes supplied by hadoop compatibility modules. Only one of each class will be
* created.
@@ -36,7 +36,7 @@ public class CompatibilitySingletonFactory extends CompatibilityFactory {
private final Object lock = new Object();
private final Map instances = new HashMap<>();
}
- private static final Log LOG = LogFactory.getLog(CompatibilitySingletonFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompatibilitySingletonFactory.class);
/**
* This is a static only class don't let anyone create an instance.
@@ -67,7 +67,7 @@ public class CompatibilitySingletonFactory extends CompatibilityFactory {
msg.append(it.next()).append(" ");
}
msg.append("}");
- LOG.warn(msg);
+ LOG.warn(msg.toString());
}
} catch (Exception e) {
throw new RuntimeException(createExceptionString(klass), e);
diff --git a/hbase-hadoop2-compat/pom.xml b/hbase-hadoop2-compat/pom.xml
index b2614357b0..0a99090b3c 100644
--- a/hbase-hadoop2-compat/pom.xml
+++ b/hbase-hadoop2-compat/pom.xml
@@ -169,8 +169,8 @@ limitations under the License.
commons-lang3
- commons-logging
- commons-logging
+ org.slf4j
+ slf4j-api
 org.apache.hbase.thirdparty
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java
index 0400f7f2c9..69892851bb 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
@@ -35,7 +35,7 @@ import org.apache.hadoop.mapreduce.JobSubmissionFiles;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public abstract class JobUtil {
- private static final Log LOG = LogFactory.getLog(JobUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JobUtil.class);
protected JobUtil() {
super();
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java
index 7db26a512b..0ad5d14bca 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java
@@ -26,8 +26,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.metrics.MetricRegistries;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.metrics.MetricRegistryInfo;
@@ -38,7 +36,8 @@ import org.apache.hadoop.metrics2.impl.JmxCacheBuster;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystemHelper;
import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
public class GlobalMetricRegistriesAdapter {
- private static final Log LOG = LogFactory.getLog(GlobalMetricRegistriesAdapter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(GlobalMetricRegistriesAdapter.class);
private class MetricsSourceAdapter implements MetricsSource {
private final MetricRegistry registry;
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java
index 3a24b94de5..b6a17cf177 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java
@@ -35,8 +35,6 @@ package org.apache.hadoop.hbase.metrics.impl;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.metrics.Counter;
import org.apache.hadoop.hbase.metrics.Gauge;
import org.apache.hadoop.hbase.metrics.Histogram;
@@ -50,6 +48,8 @@ import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.Interns;
import org.apache.hadoop.metrics2.lib.MutableHistogram;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This is the adapter from "HBase Metrics Framework", implemented in hbase-metrics-api and
@@ -60,8 +60,8 @@ import org.apache.hadoop.metrics2.lib.MutableHistogram;
* Some of the code is forked from https://github.com/joshelser/dropwizard-hadoop-metrics2.
*/
public class HBaseMetrics2HadoopMetricsAdapter {
- private static final Log LOG
- = LogFactory.getLog(HBaseMetrics2HadoopMetricsAdapter.class);
+ private static final Logger LOG
+ = LoggerFactory.getLogger(HBaseMetrics2HadoopMetricsAdapter.class);
private static final String EMPTY_STRING = "";
public HBaseMetrics2HadoopMetricsAdapter() {
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java
index b03549dfa0..cb78ccf183 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java
@@ -23,9 +23,9 @@ import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsCollector;
@@ -37,7 +37,7 @@ import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
public class MetricsRegionAggregateSourceImpl extends BaseSourceImpl
implements MetricsRegionAggregateSource {
- private static final Log LOG = LogFactory.getLog(MetricsRegionAggregateSourceImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionAggregateSourceImpl.class);
private final MetricsExecutorImpl executor = new MetricsExecutorImpl();
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
index 09175d5dbe..8f11811a94 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
@@ -31,7 +31,7 @@ import org.apache.hadoop.metrics2.lib.MutableFastCounter;
@InterfaceAudience.Private
public class MetricsRegionSourceImpl implements MetricsRegionSource {
- private static final Log LOG = LogFactory.getLog(MetricsRegionSourceImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionSourceImpl.class);
private AtomicBoolean closed = new AtomicBoolean(false);
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java
index 5ef8d81246..588986e7c4 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsCollector;
@@ -32,7 +32,7 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
public class MetricsTableAggregateSourceImpl extends BaseSourceImpl
implements MetricsTableAggregateSource {
- private static final Log LOG = LogFactory.getLog(MetricsTableAggregateSourceImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsTableAggregateSourceImpl.class);
private ConcurrentHashMap tableSources = new ConcurrentHashMap<>();
public MetricsTableAggregateSourceImpl() {
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java
index f95eb4c597..2269d9ab93 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
@@ -31,7 +31,7 @@ import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
@InterfaceAudience.Private
public class MetricsTableSourceImpl implements MetricsTableSource {
- private static final Log LOG = LogFactory.getLog(MetricsTableSourceImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsTableSourceImpl.class);
private AtomicBoolean closed = new AtomicBoolean(false);
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
index 00763c67d0..dc5608014e 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
@@ -22,9 +22,9 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.metrics2.MetricsExecutor;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class JmxCacheBuster {
- private static final Log LOG = LogFactory.getLog(JmxCacheBuster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JmxCacheBuster.class);
private static AtomicReference fut = new AtomicReference<>(null);
private static MetricsExecutor executor = new MetricsExecutorImpl();
private static AtomicBoolean stopped = new AtomicBoolean(false);
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java
index eb465c38c8..3ecd8887ef 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java
@@ -20,13 +20,12 @@ package org.apache.hadoop.metrics2.lib;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class DefaultMetricsSystemHelper {
- private static final Log LOG = LogFactory.getLog(DefaultMetricsSystemHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultMetricsSystemHelper.class);
private final Method removeObjectMethod;
private final Field sourceNamesField;
private final Field mapField;
@@ -49,7 +48,7 @@ public class DefaultMetricsSystemHelper {
f2 = UniqueNames.class.getDeclaredField("map");
f2.setAccessible(true);
} catch (NoSuchFieldException e) {
- LOG.trace(e);
+ LOG.trace(e.toString(), e);
f1 = null;
f2 = null;
}
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java
index a7d221b3b5..0b8111bb65 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.metrics2.lib;
import java.util.Collection;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsInfo;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
*/
@InterfaceAudience.Private
public class DynamicMetricsRegistry {
- private static final Log LOG = LogFactory.getLog(DynamicMetricsRegistry.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DynamicMetricsRegistry.class);
private final ConcurrentMap metricsMap =
Maps.newConcurrentMap();
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
index c2b5944ba2..087a33f82e 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
@@ -47,8 +47,6 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -93,6 +91,8 @@ import org.eclipse.jetty.webapp.WebAppContext;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Create a Jetty embedded server to answer http requests. The primary goal
@@ -105,7 +105,7 @@ import org.glassfish.jersey.servlet.ServletContainer;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class HttpServer implements FilterContainer {
- private static final Log LOG = LogFactory.getLog(HttpServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HttpServer.class);
private static final String EMPTY_STRING = "";
private static final int DEFAULT_MAX_HEADER_SIZE = 64 * 1024; // 64K
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index 3cd3e6c28d..f75024de6d 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -236,8 +236,8 @@
metrics-core
- commons-logging
- commons-logging
+ org.slf4j
+ slf4j-api
 commons-cli
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
index e8a00416e5..6546ac9288 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
+import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
@@ -422,7 +423,7 @@ public class DistributedHBaseCluster extends HBaseCluster {
LOG.warn("Restoring cluster - restoring region servers reported "
+ deferred.size() + " errors:");
for (int i=0; i columnFamilies;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
index 70452bb556..a49f54117e 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
@@ -23,18 +23,18 @@ import java.util.Collection;
import java.util.List;
import org.apache.commons.lang3.RandomUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.chaos.policies.Policy;
import org.apache.hadoop.hbase.util.Pair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Chaos monkey that given multiple policies will run actions against the cluster.
*/
public class PolicyBasedChaosMonkey extends ChaosMonkey {
- private static final Log LOG = LogFactory.getLog(PolicyBasedChaosMonkey.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PolicyBasedChaosMonkey.class);
private static final long ONE_SEC = 1000;
private static final long FIVE_SEC = 5 * ONE_SEC;
private static final long ONE_MIN = 60 * ONE_SEC;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java
index 6b365f81a5..81267a6568 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java
@@ -18,18 +18,18 @@
package org.apache.hadoop.hbase.chaos.policies;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.chaos.actions.Action;
import org.apache.hadoop.hbase.util.StoppableImplementation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A policy to introduce chaos to the cluster
*/
public abstract class Policy extends StoppableImplementation implements Runnable {
- protected static final Log LOG = LogFactory.getLog(Policy.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(Policy.class);
protected PolicyContext context;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java
index d72111f0ef..8385c15628 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java
@@ -23,8 +23,6 @@ import java.util.Set;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@@ -34,11 +32,12 @@ import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.util.ToolRunner;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
public class ChaosMonkeyRunner extends AbstractHBaseTool {
- private static final Log LOG = LogFactory.getLog(ChaosMonkeyRunner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ChaosMonkeyRunner.class);
public static final String MONKEY_LONG_OPT = "monkey";
public static final String CHAOS_MONKEY_PROPS = "monkeyProps";
@@ -75,7 +74,7 @@ public class ChaosMonkeyRunner extends AbstractHBaseTool {
monkeyProps.load(this.getClass().getClassLoader()
.getResourceAsStream(chaosMonkeyPropsFile));
} catch (IOException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
System.exit(EXIT_FAILURE);
}
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java
index 1ce4356569..123b872fb3 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java
@@ -24,19 +24,19 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class can be used to control chaos monkeys life cycle.
*/
public class Monkeys implements Closeable {
- private static final Log LOG = LogFactory.getLog(Monkeys.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Monkeys.class);
private final Configuration conf;
private final ChaosMonkeyRunner monkeyRunner;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
index 27a2d8506f..9754d4eac6 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
@@ -37,8 +37,6 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.codec.Codec;
@@ -51,13 +49,14 @@ import org.apache.hadoop.hbase.util.Threads;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category(IntegrationTests.class)
public class IntegrationTestRpcClient {
- private static final Log LOG = LogFactory.getLog(IntegrationTestRpcClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestRpcClient.class);
private final Configuration conf;
@@ -203,7 +202,7 @@ public class IntegrationTestRpcClient {
try {
cluster.startServer();
} catch (Exception e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
exception.compareAndSet(null, e);
}
} else {
@@ -211,7 +210,7 @@ public class IntegrationTestRpcClient {
try {
cluster.stopRandomServer();
} catch (Exception e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
exception.compareAndSet(null, e);
}
}
@@ -261,7 +260,7 @@ public class IntegrationTestRpcClient {
BlockingInterface stub = newBlockingStub(rpcClient, server.getListenerAddress());
ret = stub.echo(null, param);
} catch (Exception e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
continue; // expected in case connection is closing or closed
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
index 3fa1054d95..2588e635a1 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
@@ -34,8 +34,6 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -88,7 +86,8 @@ import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@@ -128,7 +127,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@Category(IntegrationTests.class)
public class IntegrationTestBulkLoad extends IntegrationTestBase {
- private static final Log LOG = LogFactory.getLog(IntegrationTestBulkLoad.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestBulkLoad.class);
private static final byte[] CHAIN_FAM = Bytes.toBytes("L");
private static final byte[] SORT_FAM = Bytes.toBytes("S");
@@ -197,7 +196,7 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase {
Thread.sleep(sleepTime.get());
}
} catch (InterruptedException e1) {
- LOG.error(e1);
+ LOG.error(e1.toString(), e1);
}
}
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
index dfc54e0b33..ab5f2bb827 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
@@ -30,8 +30,6 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -58,6 +56,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Validate ImportTsv + LoadIncrementalHFiles on a distributed cluster.
@@ -66,7 +66,7 @@ import org.junit.rules.TestName;
public class IntegrationTestImportTsv extends Configured implements Tool {
private static final String NAME = IntegrationTestImportTsv.class.getSimpleName();
- private static final Log LOG = LogFactory.getLog(IntegrationTestImportTsv.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestImportTsv.class);
protected static final String simple_tsv =
"row1\t1\tc1\tc2\n" +
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
index 2df1c4bff1..84e40f50a0 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mapreduce;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -34,6 +32,8 @@ import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An integration test to test {@link TableSnapshotInputFormat} which enables
@@ -69,7 +69,7 @@ import org.junit.experimental.categories.Category;
// Not runnable as a unit test. See TestTableSnapshotInputFormat
public class IntegrationTestTableSnapshotInputFormat extends IntegrationTestBase {
- private static final Log LOG = LogFactory.getLog(IntegrationTestTableSnapshotInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestTableSnapshotInputFormat.class);
private static final String TABLE_NAME_KEY = "IntegrationTestTableSnapshotInputFormat.table";
private static final String DEFAULT_TABLE_NAME = "IntegrationTestTableSnapshotInputFormat";
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
index e45baf151c..bb31ece973 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
@@ -30,8 +30,6 @@ import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
@@ -74,6 +72,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Integration test that should benchmark how fast HBase can recover from failures. This test starts
@@ -121,7 +121,7 @@ public class IntegrationTestMTTR {
* Constants.
*/
private static final byte[] FAMILY = Bytes.toBytes("d");
- private static final Log LOG = LogFactory.getLog(IntegrationTestMTTR.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestMTTR.class);
private static long sleepTime;
private static final String SLEEP_TIME_KEY = "hbase.IntegrationTestMTTR.sleeptime";
private static final long SLEEP_TIME_DEFAULT = 60 * 1000l;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
index b10e54a74f..f5f2ff99f2 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
@@ -19,8 +19,6 @@
*/
package org.apache.hadoop.hbase.rsgroup;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.Waiter;
@@ -29,6 +27,8 @@ import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.junit.After;
import org.junit.Before;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Runs all of the units tests defined in TestGroupBase as an integration test.
@@ -36,7 +36,7 @@ import org.junit.experimental.categories.Category;
*/
@Category(IntegrationTests.class)
public class IntegrationTestRSGroup extends TestRSGroupsBase {
- private final static Log LOG = LogFactory.getLog(IntegrationTestRSGroup.class);
+ private final static Logger LOG = LoggerFactory.getLogger(IntegrationTestRSGroup.class);
private static boolean initialized = false;
@Before
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index b9cc69d9bf..826db07552 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -42,8 +42,6 @@ import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -119,7 +117,8 @@ import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
/**
@@ -253,7 +252,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
*/
static class Generator extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(Generator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Generator.class);
/**
* Set this configuration if you want to test single-column family flush works. If set, we will
@@ -854,7 +853,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
* WALs and oldWALs dirs (Some of this is TODO).
*/
static class Search extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(Search.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Search.class);
protected Job job;
private static void printUsage(final String error) {
@@ -914,7 +913,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
try {
LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
} catch (IOException|InterruptedException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
}
if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);
@@ -1016,7 +1015,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
*/
static class Verify extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(Verify.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Verify.class);
protected static final BytesWritable DEF = new BytesWritable(new byte[] { 0 });
protected static final BytesWritable DEF_LOST_FAMILIES = new BytesWritable(new byte[] { 1 });
@@ -1455,7 +1454,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
*/
static class Loop extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(Loop.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Loop.class);
private static final String USAGE = "Usage: Loop " +
"
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
       <groupId>org.apache.zookeeper</groupId>
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
index 91ef71404d..b0674bf401 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
@@ -19,12 +19,11 @@
package org.apache.hadoop.hbase.mapred;
import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
@@ -44,28 +43,30 @@ import org.apache.hadoop.mapred.Partitioner;
@InterfaceAudience.Public
public class HRegionPartitioner
implements Partitioner {
- private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegionPartitioner.class);
// Connection and locator are not cleaned up; they just die when partitioner is done.
private Connection connection;
private RegionLocator locator;
private byte[][] startKeys;
+ @Override
public void configure(JobConf job) {
try {
this.connection = ConnectionFactory.createConnection(HBaseConfiguration.create(job));
TableName tableName = TableName.valueOf(job.get(TableOutputFormat.OUTPUT_TABLE));
this.locator = this.connection.getRegionLocator(tableName);
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
try {
this.startKeys = this.locator.getStartKeys();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
+ @Override
public int getPartition(ImmutableBytesWritable key, V2 value, int numPartitions) {
byte[] region = null;
// Only one region return 0
@@ -77,7 +78,7 @@ implements Partitioner {
// here if a region splits while mapping
region = locator.getRegionLocation(key.get()).getRegionInfo().getStartKey();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
for (int i = 0; i < this.startKeys.length; i++){
if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
index 3460fe7df8..ba1df4c3a8 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.mapred;
import java.io.IOException;
import java.util.Iterator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.mapred.MapReduceBase;
@@ -38,8 +38,8 @@ public class IdentityTableReduce
extends MapReduceBase
implements TableReduce {
@SuppressWarnings("unused")
- private static final Log LOG =
- LogFactory.getLog(IdentityTableReduce.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(IdentityTableReduce.class.getName());
/**
* No aggregation, output pairs of (key, record)
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
index cbd72362ee..d9bb66bdf0 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.mapred;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
@@ -39,7 +39,7 @@ import org.apache.hadoop.util.StringUtils;
@InterfaceAudience.Public
public class TableInputFormat extends TableInputFormatBase implements
JobConfigurable {
- private static final Log LOG = LogFactory.getLog(TableInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableInputFormat.class);
/**
* space delimited list of columns
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
index 48ee763cf2..509972e92a 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.mapred;
import java.io.Closeable;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
@@ -79,7 +79,7 @@ import org.apache.hadoop.mapred.Reporter;
@InterfaceAudience.Public
public abstract class TableInputFormatBase
implements InputFormat {
- private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableInputFormatBase.class);
private byte [][] inputColumns;
private Table table;
private RegionLocator regionLocator;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
index 95be24f195..a49d0ec5c3 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
@@ -19,10 +19,9 @@
package org.apache.hadoop.hbase.mapred;
import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
@@ -43,7 +42,7 @@ import static org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl.LOG_PER_RO
*/
@InterfaceAudience.Public
public class TableRecordReaderImpl {
- private static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableRecordReaderImpl.class);
private byte [] startRow;
private byte [] endRow;
@@ -248,7 +247,7 @@ public class TableRecordReaderImpl {
long now = System.currentTimeMillis();
LOG.info("Mapper took " + (now-timestamp)
+ "ms to process " + rowcount + " rows");
- LOG.info(ioe);
+ LOG.info(ioe.toString(), ioe);
String lastRow = lastSuccessfulRow == null ?
"null" : Bytes.toStringBinary(lastSuccessfulRow);
LOG.info("lastSuccessfulRow=" + lastRow);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
index 48cc0d5a07..6d6125f033 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
@@ -72,8 +72,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
*/
@InterfaceAudience.Public
public class CellCounter extends Configured implements Tool {
- private static final Log LOG =
- LogFactory.getLog(CellCounter.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(CellCounter.class.getName());
/**
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
index 81af16580c..2e9e62cf37 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
@@ -23,8 +23,6 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -49,7 +49,7 @@ import org.apache.hadoop.util.ToolRunner;
*/
@InterfaceAudience.Public
public class CopyTable extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(CopyTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CopyTable.class);
final static String NAME = "copytable";
long startTime = 0;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
index 775739faa8..cc3072013f 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
@@ -26,12 +26,12 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.Tag;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@InterfaceAudience.Private
public class DefaultVisibilityExpressionResolver implements VisibilityExpressionResolver {
- private static final Log LOG = LogFactory.getLog(DefaultVisibilityExpressionResolver.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultVisibilityExpressionResolver.class);
private Configuration conf;
private final Map labels = new HashMap<>();
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
index 7107537223..34f33983bd 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
@@ -21,14 +21,15 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
@@ -48,7 +49,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
*/
@InterfaceAudience.Private
public final class ExportUtils {
- private static final Log LOG = LogFactory.getLog(ExportUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExportUtils.class);
public static final String RAW_SCAN = "hbase.mapreduce.include.deleted.rows";
public static final String EXPORT_BATCHING = "hbase.export.scanner.batch";
public static final String EXPORT_CACHING = "hbase.export.scanner.caching";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index ffe1c85b1d..9bd0530226 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -40,8 +40,6 @@ import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -91,6 +89,8 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -105,7 +105,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Public
public class HFileOutputFormat2
extends FileOutputFormat {
- private static final Log LOG = LogFactory.getLog(HFileOutputFormat2.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileOutputFormat2.class);
static class TableInfo {
private TableDescriptor tableDesctiptor;
private RegionLocator regionLocator;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
index d8c2314b22..b48ecf02a0 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
@@ -19,10 +19,9 @@
package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -53,7 +52,7 @@ public class HRegionPartitioner
extends Partitioner
implements Configurable {
- private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegionPartitioner.class);
private Configuration conf = null;
// Connection and locator are not cleaned up; they just die when partitioner is done.
private Connection connection;
@@ -86,7 +85,7 @@ implements Configurable {
// here if a region splits while mapping
region = this.locator.getRegionLocation(key.get()).getRegionInfo().getStartKey();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
for (int i = 0; i < this.startKeys.length; i++){
if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){
@@ -129,12 +128,12 @@ implements Configurable {
TableName tableName = TableName.valueOf(conf.get(TableOutputFormat.OUTPUT_TABLE));
this.locator = this.connection.getRegionLocator(tableName);
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
try {
this.startKeys = this.locator.getStartKeys();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
}
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
index 2c8caf503a..e68ac3b354 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
@@ -27,8 +27,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Properties;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -56,14 +54,15 @@ import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Charsets;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Ordering;
public class HashTable extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(HashTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HashTable.class);
private static final int DEFAULT_BATCH_SIZE = 8000;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
index 76c1f607be..876953c862 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.io.Writable;
@@ -53,7 +53,7 @@ public class IdentityTableReducer
extends TableReducer {
@SuppressWarnings("unused")
- private static final Log LOG = LogFactory.getLog(IdentityTableReducer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IdentityTableReducer.class);
/**
* Writes each given record, consisting of the row key and the given values,
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
index bf0081b28a..e8e1de0093 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
@@ -33,8 +33,6 @@ import java.util.Map;
import java.util.TreeMap;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -79,6 +77,8 @@ import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -86,7 +86,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Public
public class Import extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(Import.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Import.class);
final static String NAME = "import";
public final static String CF_RENAME_PROP = "HBASE_IMPORTER_RENAME_CFS";
public final static String BULK_OUTPUT_CONF_KEY = "import.bulk.output";
@@ -192,7 +192,7 @@ public class Import extends Configured implements Tool {
extends TableMapper {
private Map cfRenameMap;
private Filter filter;
- private static final Log LOG = LogFactory.getLog(CellImporter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CellImporter.class);
/**
* @param row The current table row key.
@@ -256,7 +256,7 @@ public class Import extends Configured implements Tool {
public static class CellImporter extends TableMapper {
private Map cfRenameMap;
private Filter filter;
- private static final Log LOG = LogFactory.getLog(CellImporter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CellImporter.class);
/**
* @param row The current table row key.
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
index d672803b4b..678377d582 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
@@ -27,8 +27,6 @@ import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -77,7 +77,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@InterfaceAudience.Public
public class ImportTsv extends Configured implements Tool {
- protected static final Log LOG = LogFactory.getLog(ImportTsv.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(ImportTsv.class);
final static String NAME = "importtsv";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
index b7e9479973..03834f2b8e 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Public
@VisibleForTesting
public class MultiTableHFileOutputFormat extends HFileOutputFormat2 {
- private static final Log LOG = LogFactory.getLog(MultiTableHFileOutputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiTableHFileOutputFormat.class);
/**
* Creates a composite key to use as a mapper output key when using
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 82a86b4047..d8205c1a0c 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -22,9 +22,9 @@ import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
@@ -55,7 +55,7 @@ import java.util.Iterator;
public abstract class MultiTableInputFormatBase extends
InputFormat {
- private static final Log LOG = LogFactory.getLog(MultiTableInputFormatBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiTableInputFormatBase.class);
/** Holds the set of scans used to define the input. */
private List scans;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
index 4cf50f23f1..2a4fae9440 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
@@ -22,9 +22,9 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
@@ -72,7 +72,7 @@ public class MultiTableOutputFormat extends OutputFormat {
- private static final Log LOG = LogFactory.getLog(MultiTableRecordWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiTableRecordWriter.class);
Connection connection;
Map mutatorMap = new HashMap<>();
Configuration conf;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java
index b5cba645a8..316b26e1c1 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java
@@ -20,14 +20,14 @@ package org.apache.hadoop.hbase.mapreduce;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
@@ -51,7 +51,7 @@ import java.util.UUID;
@InterfaceStability.Evolving
public class MultiTableSnapshotInputFormatImpl {
- private static final Log LOG = LogFactory.getLog(MultiTableSnapshotInputFormatImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiTableSnapshotInputFormatImpl.class);
public static final String RESTORE_DIRS_KEY =
"hbase.MultiTableSnapshotInputFormat.restore.snapshotDirMapping";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
index a505379664..626deffda4 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
@@ -23,8 +23,6 @@ import java.lang.reflect.Method;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -41,6 +39,8 @@ import org.apache.hadoop.mapreduce.StatusReporter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -58,7 +58,7 @@ import org.apache.hadoop.util.ReflectionUtils;
*/
public class MultithreadedTableMapper<K2, V2> extends TableMapper<K2, V2> {
- private static final Log LOG = LogFactory.getLog(MultithreadedTableMapper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultithreadedTableMapper.class);
private Class<? extends Mapper<ImmutableBytesWritable, Result, K2, V2>> mapClass;
private Context outer;
private ExecutorService executor;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java
index 7da2f9b3b3..317b328df7 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java
@@ -23,9 +23,9 @@ import java.util.List;
import java.util.Map.Entry;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@@ -39,7 +39,7 @@ import org.apache.hadoop.mapreduce.Reducer;
*/
@InterfaceAudience.Public
public class PutCombiner<K> extends Reducer<K, Put, K, Put> {
- private static final Log LOG = LogFactory.getLog(PutCombiner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PutCombiner.class);
@Override
protected void reduce(K row, Iterable vals, Context context)
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java
index 33f09cfe00..2768ceceed 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java
@@ -25,13 +25,13 @@ import java.util.Set;
import java.util.TreeMap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.RegionLoad;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.util.Bytes;
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public class RegionSizeCalculator {
- private static final Log LOG = LogFactory.getLog(RegionSizeCalculator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionSizeCalculator.class);
/**
* Maps each region to its size in bytes.
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
index c9f3022bd3..dac1d425d8 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
@@ -25,13 +25,13 @@ import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
@@ -42,7 +42,7 @@ import org.apache.hadoop.io.serializer.Serializer;
@InterfaceAudience.Public
public class ResultSerialization extends Configured implements Serialization<Result> {
- private static final Log LOG = LogFactory.getLog(ResultSerialization.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ResultSerialization.class);
// The following configuration property indicates import file format version.
public static final String IMPORT_FORMAT_VER = "hbase.import.version";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
index ea89c928ff..9c7b489181 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
@@ -22,11 +22,11 @@ import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -50,7 +50,7 @@ import org.apache.hadoop.util.ToolRunner;
@InterfaceAudience.Public
public class RowCounter extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(RowCounter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RowCounter.class);
/** Name of this 'program'. */
static final String NAME = "rowcounter";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java
index ad65e49bc3..1c31eda294 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java
@@ -18,9 +18,9 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -46,7 +46,7 @@ import org.apache.hadoop.mapreduce.Partitioner;
@InterfaceAudience.Public
public class SimpleTotalOrderPartitioner<VALUE> extends Partitioner<ImmutableBytesWritable, VALUE>
implements Configurable {
- private final static Log LOG = LogFactory.getLog(SimpleTotalOrderPartitioner.class);
+ private final static Logger LOG = LoggerFactory.getLogger(SimpleTotalOrderPartitioner.class);
@Deprecated
public static final String START = "hbase.simpletotalorder.start";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
index edef842f5a..bc528fcf83 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
@@ -21,8 +21,6 @@ import java.io.IOException;
import java.util.Iterator;
import java.util.Collections;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
@@ -50,12 +48,13 @@ import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
public class SyncTable extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(SyncTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SyncTable.class);
static final String SOURCE_HASH_DIR_CONF_KEY = "sync.table.source.hash.dir";
static final String SOURCE_TABLE_CONF_KEY = "sync.table.source.table.name";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
index 9eefac9def..480c6118b0 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
@@ -23,13 +23,13 @@ import java.util.Collections;
import java.util.List;
import java.util.Locale;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
@@ -49,7 +49,7 @@ public class TableInputFormat extends TableInputFormatBase
implements Configurable {
@SuppressWarnings("hiding")
- private static final Log LOG = LogFactory.getLog(TableInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableInputFormat.class);
/** Job parameter that specifies the input table. */
public static final String INPUT_TABLE = "hbase.mapreduce.inputtable";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index fa2e6a2f81..5acbe2c2c1 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -28,9 +28,9 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
@@ -109,7 +109,7 @@ import org.apache.hadoop.util.StringUtils;
public abstract class TableInputFormatBase
extends InputFormat<ImmutableBytesWritable, Result> {
- private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableInputFormatBase.class);
private static final String NOT_INITIALIZED = "The input format instance has not been properly " +
"initialized. Ensure you call initializeTable either in your constructor or initialize " +
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index c37f284049..f94110b2ff 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -33,8 +33,6 @@ import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
@@ -70,7 +70,7 @@ import com.codahale.metrics.MetricRegistry;
@SuppressWarnings({ "rawtypes", "unchecked" })
@InterfaceAudience.Public
public class TableMapReduceUtil {
- private static final Log LOG = LogFactory.getLog(TableMapReduceUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableMapReduceUtil.class);
/**
* Use this before submitting a TableMap job. It will appropriately set up
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
index 07a2a08cc1..7598520a48 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
@@ -19,10 +19,9 @@
package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -52,7 +51,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
public class TableOutputFormat<KEY> extends OutputFormat<KEY, Mutation>
implements Configurable {
- private static final Log LOG = LogFactory.getLog(TableOutputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableOutputFormat.class);
/** Job parameter that specifies the output table. */
public static final String OUTPUT_TABLE = "hbase.mapred.outputtable";
@@ -232,7 +231,7 @@ implements Configurable {
this.conf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, zkClientPort);
}
} catch(IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
throw new RuntimeException(e);
}
}
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
index 511994b50d..40c0e7c627 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
@@ -20,10 +20,9 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
@@ -50,7 +49,7 @@ public class TableRecordReaderImpl {
public static final String LOG_PER_ROW_COUNT
= "hbase.mapreduce.log.scanner.rowcount";
- private static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableRecordReaderImpl.class);
// HBASE_COUNTER_GROUP_NAME is the name of mapreduce counter group for HBase
@VisibleForTesting
@@ -254,7 +253,7 @@ public class TableRecordReaderImpl {
long now = System.currentTimeMillis();
LOG.info("Mapper took " + (now-timestamp)
+ "ms to process " + rowcount + " rows");
- LOG.info(ioe);
+ LOG.info(ioe.toString(), ioe);
String lastRow = lastSuccessfulRow == null ?
"null" : Bytes.toStringBinary(lastSuccessfulRow);
LOG.info("lastSuccessfulRow=" + lastRow);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
index bee4926ca6..b1a9e49f14 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mapreduce;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.HDFSBlocksDistribution.HostAndWeight;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ClientSideRegionScanner;
import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Result;
@@ -64,7 +64,7 @@ public class TableSnapshotInputFormatImpl {
// TODO: Snapshots files are owned in fs by the hbase user. There is no
// easy way to delegate access.
- public static final Log LOG = LogFactory.getLog(TableSnapshotInputFormatImpl.class);
+ public static final Logger LOG = LoggerFactory.getLogger(TableSnapshotInputFormatImpl.class);
private static final String SNAPSHOT_NAME_KEY = "hbase.TableSnapshotInputFormat.snapshot.name";
// key for specifying the root dir of the restored snapshot
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
index 19614afbad..de42c31678 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
@@ -23,9 +23,9 @@ import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Scan;
@@ -43,7 +43,7 @@ public class TableSplit extends InputSplit
implements Writable, Comparable<TableSplit> {
/** @deprecated LOG variable would be made private. fix in hbase 3.0 */
@Deprecated
- public static final Log LOG = LogFactory.getLog(TableSplit.class);
+ public static final Logger LOG = LoggerFactory.getLogger(TableSplit.class);
// should be < 0 (@see #readFields(DataInput))
// version 1 supports Scan data member
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
index 796acb925f..1815412721 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
@@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -35,6 +33,8 @@ import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL;
@@ -55,7 +55,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Public
public class WALInputFormat extends InputFormat<WALKey, WALEdit> {
- private static final Log LOG = LogFactory.getLog(WALInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALInputFormat.class);
public static final String START_TIME_KEY = "wal.start.time";
public static final String END_TIME_KEY = "wal.end.time";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
index 15c33cbe7e..7212d4be42 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
@@ -23,8 +23,6 @@ import java.text.SimpleDateFormat;
import java.util.Map;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
@@ -52,6 +50,8 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A tool to replay WAL files as a M/R job.
@@ -65,7 +65,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Public
public class WALPlayer extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(WALPlayer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALPlayer.class);
final static String NAME = "WALPlayer";
public final static String BULK_OUTPUT_CONF_KEY = "wal.bulk.output";
public final static String TABLES_KEY = "wal.input.tables";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
index b8de9ec088..01df2bd6d3 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.mapreduce.replication;
import java.io.IOException;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -64,7 +62,8 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -79,8 +78,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
public class VerifyReplication extends Configured implements Tool {
- private static final Log LOG =
- LogFactory.getLog(VerifyReplication.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(VerifyReplication.class);
public final static String NAME = "verifyrep";
private final static String PEER_CONFIG_PREFIX = NAME + ".peer.";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
index aec5fa08b9..746bb5ff82 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
@@ -27,8 +27,6 @@ import java.util.List;
import java.util.Optional;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -63,6 +61,8 @@ import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/*
* The CompactionTool allows to execute a compaction specifying a:
@@ -74,7 +74,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class CompactionTool extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(CompactionTool.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactionTool.class);
private final static String CONF_TMP_DIR = "hbase.tmp.dir";
private final static String CONF_COMPACT_ONCE = "hbase.compactiontool.compact.once";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 462d6bc37a..66e9e3bfc9 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -32,8 +32,6 @@ import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -73,7 +71,8 @@ import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -93,7 +92,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
/** Configuration prefix for overrides for the destination filesystem */
public static final String CONF_DEST_PREFIX = NAME + ".to.";
- private static final Log LOG = LogFactory.getLog(ExportSnapshot.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);
private static final String MR_NUM_MAPS = "mapreduce.job.maps";
private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";
@@ -153,7 +152,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
private static class ExportMapper extends Mapper<BytesWritable, NullWritable, NullWritable, NullWritable> {
- private static final Log LOG = LogFactory.getLog(ExportMapper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);
final static int REPORT_SIZE = 1 * 1024 * 1024;
final static int BUFFER_SIZE = 64 * 1024;
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 4f8b82f4fa..a86c29f044 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -47,8 +47,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -106,7 +104,8 @@ import org.apache.htrace.core.ProbabilitySampler;
import org.apache.htrace.core.Sampler;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -131,7 +130,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
public class PerformanceEvaluation extends Configured implements Tool {
static final String RANDOM_SEEK_SCAN = "randomSeekScan";
static final String RANDOM_READ = "randomRead";
- private static final Log LOG = LogFactory.getLog(PerformanceEvaluation.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
private static final ObjectMapper MAPPER = new ObjectMapper();
static {
MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
@@ -361,7 +360,8 @@ public class PerformanceEvaluation extends Configured implements Tool {
.add("desc", desc)
.add("presplit", opts.presplitRegions)
.add("splitPolicy", opts.splitPolicy)
- .add("replicas", opts.replicas));
+ .add("replicas", opts.replicas)
+ .toString());
}
// remove an existing table
@@ -1989,7 +1989,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
}
static class FilteredScanTest extends TableTest {
- protected static final Log LOG = LogFactory.getLog(FilteredScanTest.class.getName());
+ protected static final Logger LOG = LoggerFactory.getLogger(FilteredScanTest.class.getName());
FilteredScanTest(Connection con, TestOptions options, Status status) {
super(con, options, status);
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java
index 665c547019..79c4cdfb59 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.mapred;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Result;
@@ -36,6 +34,8 @@ import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Iterator;
@@ -47,7 +47,7 @@ import static org.junit.Assert.assertTrue;
public class TestMultiTableSnapshotInputFormat
extends org.apache.hadoop.hbase.mapreduce.TestMultiTableSnapshotInputFormat {
- private static final Log LOG = LogFactory.getLog(TestMultiTableSnapshotInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMultiTableSnapshotInputFormat.class);
@Override
protected void runJob(String jobName, Configuration c, List scans)
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
index ace2ffab40..369f1c1174 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
@@ -32,8 +32,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.Cell;
@@ -68,6 +66,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This tests the TableInputFormat and its recovery semantics
@@ -75,7 +75,7 @@ import org.mockito.stubbing.Answer;
@Category({MapReduceTests.class, LargeTests.class})
public class TestTableInputFormat {
- private static final Log LOG = LogFactory.getLog(TestTableInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormat.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
index 3f905cffea..d300e7d745 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -41,6 +39,8 @@ import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test Map/Reduce job over HBase tables. The map/reduce process we're testing
@@ -50,10 +50,10 @@ import org.junit.experimental.categories.Category;
@Category({MapReduceTests.class, LargeTests.class})
@SuppressWarnings("deprecation")
public class TestTableMapReduce extends TestTableMapReduceBase {
- private static final Log LOG =
- LogFactory.getLog(TestTableMapReduce.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestTableMapReduce.class.getName());
- protected Log getLog() { return LOG; }
+ protected Logger getLog() { return LOG; }
/**
* Pass the given key and processed record reduce
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
index ac2f20d895..4a60110648 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
@@ -28,8 +28,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -53,15 +51,16 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
@Category({MapReduceTests.class, LargeTests.class})
public class TestTableMapReduceUtil {
- private static final Log LOG = LogFactory
- .getLog(TestTableMapReduceUtil.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TestTableMapReduceUtil.class);
private static Table presidentsTable;
private static final String TABLE_NAME = "People";
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java
index 835117c020..785380f5a0 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.mapred;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -30,6 +28,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
@@ -44,8 +44,8 @@ import static org.junit.Assert.fail;
@Category(MediumTests.class)
public class TestTableOutputFormatConnectionExhaust {
- private static final Log LOG =
- LogFactory.getLog(TestTableOutputFormatConnectionExhaust.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestTableOutputFormatConnectionExhaust.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
static final String TABLE = "TestTableOutputFormatConnectionExhaust";
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
index c717fa96a9..17b51069f9 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.mapreduce;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
@@ -42,6 +40,8 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
@@ -60,7 +60,7 @@ import static org.junit.Assert.assertTrue;
public abstract class MultiTableInputFormatTestBase {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- static final Log LOG = LogFactory.getLog(TestMultiTableInputFormat.class);
+ static final Logger LOG = LoggerFactory.getLogger(TestMultiTableInputFormat.class);
public static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
static final String TABLE_NAME = "scantest";
static final byte[] INPUT_FAMILY = Bytes.toBytes("contents");
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
index 362dca1963..d28b2c0d77 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -44,6 +42,8 @@ import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertFalse;
@@ -51,7 +51,7 @@ import java.io.IOException;
import java.util.Arrays;
public abstract class TableSnapshotInputFormatTestBase {
- private static final Log LOG = LogFactory.getLog(TableSnapshotInputFormatTestBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableSnapshotInputFormatTestBase.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
protected final HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index aa2984f7f9..09226f606e 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -40,8 +40,6 @@ import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -110,6 +108,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Simple test for {@link HFileOutputFormat2}.
@@ -131,7 +131,7 @@ public class TestHFileOutputFormat2 {
private HBaseTestingUtility util = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestHFileOutputFormat2.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHFileOutputFormat2.class);
/**
* Simple mapper that makes KeyValue output.
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
index 87e7852b63..6b3c71ce2b 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertEquals;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -45,6 +43,8 @@ import org.junit.experimental.categories.Category;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Basic test for the HashTable M/R tool
@@ -52,7 +52,7 @@ import org.junit.rules.TestName;
@Category(LargeTests.class)
public class TestHashTable {
- private static final Log LOG = LogFactory.getLog(TestHashTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHashTable.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index fcd01b6f48..9135676666 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -35,8 +35,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -88,6 +86,8 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests the table import and table export MR job functionality
@@ -95,7 +95,7 @@ import org.mockito.stubbing.Answer;
@Category({VerySlowMapReduceTests.class, MediumTests.class})
public class TestImportExport {
- private static final Log LOG = LogFactory.getLog(TestImportExport.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestImportExport.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1");
private static final byte[] ROW2 = Bytes.toBytesBinary("\\x32row2");
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
index a47bef1dcd..b91c6c8198 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
@@ -27,8 +27,6 @@ import java.util.List;
import java.util.Optional;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -66,12 +64,14 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MapReduceTests.class, LargeTests.class})
public class TestImportTSVWithOperationAttributes implements Configurable {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestImportTSVWithOperationAttributes.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestImportTSVWithOperationAttributes.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
index f121f20a0f..9ddbc65dda 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
@@ -26,8 +26,6 @@ import java.util.List;
import java.util.Optional;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -54,11 +52,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MapReduceTests.class, LargeTests.class})
public class TestImportTSVWithTTLs implements Configurable {
- protected static final Log LOG = LogFactory.getLog(TestImportTSVWithTTLs.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(TestImportTSVWithTTLs.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
index 469284733f..1f9dc186f0 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
@@ -29,8 +29,6 @@ import java.util.List;
import java.util.Set;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -74,11 +72,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MapReduceTests.class, LargeTests.class})
public class TestImportTSVWithVisibilityLabels implements Configurable {
- private static final Log LOG = LogFactory.getLog(TestImportTSVWithVisibilityLabels.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestImportTSVWithVisibilityLabels.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index f6fcfa38ec..9484a94a39 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -32,8 +32,6 @@ import java.util.Map;
import java.util.Set;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -71,11 +69,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({VerySlowMapReduceTests.class, LargeTests.class})
public class TestImportTsv implements Configurable {
- private static final Log LOG = LogFactory.getLog(TestImportTsv.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestImportTsv.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
index 8187b73aa4..7eeee707ea 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
@@ -18,11 +18,11 @@
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -48,7 +48,7 @@ public class TestJarFinder {
public void testJar() throws Exception {
//picking a class that is for sure in a JAR in the classpath
- String jar = JarFinder.getJar(LogFactory.class);
+ String jar = JarFinder.getJar(LoggerFactory.class);
Assert.assertTrue(new File(jar).exists());
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
index 694a359a5a..357f3750ef 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
@@ -26,8 +26,6 @@ import java.util.Iterator;
import java.util.Map;
import java.util.NavigableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
@@ -50,6 +48,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test Map/Reduce job over HBase tables. The map/reduce process we're testing
@@ -58,7 +58,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({MapReduceTests.class, LargeTests.class})
public class TestMultithreadedTableMapper {
- private static final Log LOG = LogFactory.getLog(TestMultithreadedTableMapper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMultithreadedTableMapper.class);
private static final HBaseTestingUtility UTIL =
new HBaseTestingUtility();
static final TableName MULTI_REGION_TABLE_NAME = TableName.valueOf("mrtest");
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
index 3b84e2d2d3..aba1714753 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
@@ -27,8 +27,6 @@ import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -46,6 +44,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the rowcounter map reduce job.
@@ -54,7 +54,7 @@ import org.junit.rules.TestRule;
public class TestRowCounter {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestRowCounter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRowCounter.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static String TABLE_NAME = "testRowCounter";
private final static String TABLE_NAME_TS_RANGE = "testRowCounter_ts_range";
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java
index 1e940d46cf..e2a04241a5 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -48,7 +46,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
/**
@@ -58,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
public class TestSyncTable {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestSyncTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSyncTable.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
index db50899cfb..5453054373 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
@@ -32,8 +32,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -64,6 +62,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This tests the TableInputFormat and its recovery semantics
@@ -72,7 +72,7 @@ import org.mockito.stubbing.Answer;
@Category(LargeTests.class)
public class TestTableInputFormat {
- private static final Log LOG = LogFactory.getLog(TestTableInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormat.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static MiniMRCluster mrCluster;
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
index d127adb22f..3d97071379 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.*;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -47,6 +45,8 @@ import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -62,7 +62,7 @@ import org.junit.BeforeClass;
*/
public abstract class TestTableInputFormatScanBase {
- private static final Log LOG = LogFactory.getLog(TestTableInputFormatScanBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormatScanBase.class);
static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
static final TableName TABLE_NAME = TableName.valueOf("scantest");
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
index d702e0d3c9..9c38a0ddf3 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
@@ -27,8 +27,6 @@ import java.io.IOException;
import java.util.Map;
import java.util.NavigableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
@@ -49,6 +47,8 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test Map/Reduce job over HBase tables. The map/reduce process we're testing
@@ -58,10 +58,10 @@ import org.junit.experimental.categories.Category;
@Category({VerySlowMapReduceTests.class, LargeTests.class})
public class TestTableMapReduce extends TestTableMapReduceBase {
- private static final Log LOG = LogFactory.getLog(TestTableMapReduce.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableMapReduce.class);
@Override
- protected Log getLog() { return LOG; }
+ protected Logger getLog() { return LOG; }
/**
* Pass the given key and processed record reduce
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
index 27bf0637ec..60e2622856 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
@@ -26,7 +26,6 @@ import java.util.Iterator;
import java.util.Map;
import java.util.NavigableMap;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
@@ -46,6 +45,7 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
/**
* A base class for a test Map/Reduce job over HBase tables. The map/reduce process we're testing
@@ -70,7 +70,7 @@ public abstract class TestTableMapReduceBase {
/**
* Retrieve my logger instance.
*/
- protected abstract Log getLog();
+ protected abstract Logger getLog();
/**
* Handles API-specifics for setting up and executing the job.
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
index 890eb2fe11..6c6065abdf 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
@@ -24,8 +24,6 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -55,7 +53,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import java.util.Arrays;
@@ -68,7 +67,7 @@ import org.apache.hadoop.hbase.util.RegionSplitter;
@Category({VerySlowMapReduceTests.class, LargeTests.class})
public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBase {
- private static final Log LOG = LogFactory.getLog(TestTableSnapshotInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableSnapshotInputFormat.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
index 6796c944ad..3f0c591f2f 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
@@ -51,6 +49,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
@@ -62,7 +62,7 @@ import java.util.TreeMap;
@Category({MapReduceTests.class, LargeTests.class})
public class TestTimeRangeMapRed {
- private final static Log log = LogFactory.getLog(TestTimeRangeMapRed.class);
+ private final static Logger log = LoggerFactory.getLogger(TestTimeRangeMapRed.class);
private static final HBaseTestingUtility UTIL =
new HBaseTestingUtility();
private Admin admin;
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
index 65a3421461..18bb1353ee 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
@@ -26,8 +26,6 @@ import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -57,13 +55,15 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* JUnit tests for the WALRecordReader
*/
@Category({MapReduceTests.class, MediumTests.class})
public class TestWALRecordReader {
- private static final Log LOG = LogFactory.getLog(TestWALRecordReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWALRecordReader.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Configuration conf;
private static FileSystem fs;
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
index 143f585dbd..8aefa4d169 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
@@ -30,8 +30,6 @@ import java.util.List;
import java.util.NavigableMap;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -84,14 +82,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
@Category({ReplicationTests.class, LargeTests.class})
public class TestReplicationSmallTests extends TestReplicationBase {
- private static final Log LOG = LogFactory.getLog(TestReplicationSmallTests.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationSmallTests.class);
private static final String PEER_ID = "2";
@Rule
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
index 98d6311b8f..8703ca0206 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
@@ -28,10 +28,9 @@ import java.net.URI;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
+import java.util.Objects;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -51,13 +50,13 @@ import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
-import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -68,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
public class TestExportSnapshot {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestExportSnapshot.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshot.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -350,7 +349,7 @@ public class TestExportSnapshot {
FileStatus[] list = FSUtils.listStatus(fs, dir);
if (list != null) {
for (FileStatus fstat: list) {
- LOG.debug(fstat.getPath());
+ LOG.debug(Objects.toString(fstat.getPath()));
if (fstat.isDirectory()) {
files.addAll(listFiles(fs, root, fstat.getPath()));
} else {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java
index 0077850234..f3d08ba52f 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.snapshot;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -37,6 +35,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test Export Snapshot Tool
@@ -45,7 +45,7 @@ import org.junit.rules.TestRule;
public class TestExportSnapshotNoCluster {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestExportSnapshotNoCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshotNoCluster.class);
protected final static HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
index 1d9b74e989..c620b8f55a 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
@@ -29,8 +29,6 @@ import java.util.concurrent.atomic.AtomicReference;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -67,7 +67,7 @@ import org.apache.hadoop.util.ToolRunner;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class LoadTestTool extends AbstractHBaseTool {
- private static final Log LOG = LogFactory.getLog(LoadTestTool.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LoadTestTool.class);
private static final String COLON = ":";
/** Table name for the test */
@@ -579,7 +579,7 @@ public class LoadTestTool extends AbstractHBaseTool {
try {
addAuthInfoToConf(authConfig, conf, superUser, userNames);
} catch (IOException exp) {
- LOG.error(exp);
+ LOG.error(exp.toString(), exp);
return EXIT_FAILURE;
}
userOwner = User.create(HBaseKerberosUtils.loginAndReturnUGI(conf, superUser));
@@ -609,7 +609,7 @@ public class LoadTestTool extends AbstractHBaseTool {
AccessControlClient.grant(ConnectionFactory.createConnection(conf),
tableName, userOwner.getShortName(), null, null, actions);
} catch (Throwable e) {
- LOG.fatal("Error in granting permission for the user " + userOwner.getShortName(), e);
+ LOG.error("Error in granting permission for the user " + userOwner.getShortName(), e);
return EXIT_FAILURE;
}
}
diff --git a/hbase-metrics-api/pom.xml b/hbase-metrics-api/pom.xml
index 3baa33034f..5e138495ed 100644
--- a/hbase-metrics-api/pom.xml
+++ b/hbase-metrics-api/pom.xml
@@ -85,8 +85,8 @@
- commons-logging
- commons-logging
+ org.slf4j
+ slf4j-api
org.apache.commons
diff --git a/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java b/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java
index 0c29e22e37..d398c25707 100644
--- a/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java
+++ b/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java
@@ -25,16 +25,16 @@ import java.util.ArrayList;
import java.util.List;
import java.util.ServiceLoader;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
@InterfaceAudience.Private
public class MetricRegistriesLoader {
- private static final Log LOG = LogFactory.getLog(MetricRegistries.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricRegistries.class);
private static final String defaultClass
= "org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl";
diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml
index a764f2a1e1..c791002e72 100644
--- a/hbase-procedure/pom.xml
+++ b/hbase-procedure/pom.xml
@@ -86,8 +86,8 @@
hbase-shaded-miscellaneous
- commons-logging
- commons-logging
+ org.slf4j
+ slf4j-api
commons-cli
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java
index 3e474513d8..fbfa5b2f10 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java
@@ -23,13 +23,13 @@ import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public abstract class AbstractProcedureScheduler implements ProcedureScheduler {
- private static final Log LOG = LogFactory.getLog(AbstractProcedureScheduler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractProcedureScheduler.class);
private final ReentrantLock schedulerLock = new ReentrantLock();
private final Condition schedWaitCond = schedulerLock.newCondition();
private boolean running = false;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index cbbd0e73d5..64c0233e7e 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -23,10 +23,10 @@ import java.util.Arrays;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.metrics.Counter;
import org.apache.hadoop.hbase.metrics.Histogram;
@@ -88,7 +88,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
@InterfaceStability.Evolving
public abstract class Procedure implements Comparable> {
- private static final Log LOG = LogFactory.getLog(Procedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Procedure.class);
public static final long NO_PROC_ID = -1;
protected static final int NO_TIMEOUT = -1;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java
index 20803f453f..fb3d7edaa2 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java
@@ -18,10 +18,9 @@
package org.apache.hadoop.hbase.procedure2;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -30,7 +29,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class ProcedureEvent {
- private static final Log LOG = LogFactory.getLog(ProcedureEvent.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureEvent.class);
private final T object;
private boolean ready = false;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index ac0487165e..e72c039194 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -40,12 +40,12 @@ import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
import org.apache.hadoop.hbase.procedure2.Procedure.LockState;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
@@ -75,7 +75,7 @@ import org.apache.hadoop.hbase.util.Threads;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ProcedureExecutor {
- private static final Log LOG = LogFactory.getLog(ProcedureExecutor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureExecutor.class);
public static final String CHECK_OWNER_SET_CONF_KEY = "hbase.procedure.check.owner.set";
private static final boolean DEFAULT_CHECK_OWNER_SET = false;
@@ -160,7 +160,7 @@ public class ProcedureExecutor {
*/
private static class CompletedProcedureCleaner
extends ProcedureInMemoryChore {
- private static final Log LOG = LogFactory.getLog(CompletedProcedureCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompletedProcedureCleaner.class);
private static final String CLEANER_INTERVAL_CONF_KEY = "hbase.procedure.cleaner.interval";
private static final int DEFAULT_CLEANER_INTERVAL = 30 * 1000; // 30sec
@@ -1364,7 +1364,7 @@ public class ProcedureExecutor {
return LockState.LOCK_YIELD_WAIT;
} catch (Throwable e) {
// Catch NullPointerExceptions or similar errors...
- LOG.fatal("CODE-BUG: Uncaught runtime exception for " + proc, e);
+ LOG.error("CODE-BUG: Uncaught runtime exception for " + proc, e);
}
// allows to kill the executor before something is stored to the wal.
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
index 2b66e7cf6d..65124f62eb 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
@@ -32,10 +32,10 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.procedure2.util.DelayedUtil;
import org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedContainerWithTimestamp;
import org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedWithTimeout;
@@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultima
*/
@InterfaceAudience.Private
public abstract class RemoteProcedureDispatcher> {
- private static final Log LOG = LogFactory.getLog(RemoteProcedureDispatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RemoteProcedureDispatcher.class);
public static final String THREAD_POOL_SIZE_CONF_KEY =
"hbase.procedure.remote.dispatcher.threadpool.size";
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java
index 1f928a42b5..46185eaae7 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java
@@ -23,10 +23,10 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
/**
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu
@InterfaceAudience.Private
@InterfaceStability.Evolving
class RootProcedureState {
- private static final Log LOG = LogFactory.getLog(RootProcedureState.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RootProcedureState.class);
private enum State {
RUNNING, // The Procedure is running or ready to run
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
index 893ee0cd57..ade07cc825 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
@@ -24,10 +24,10 @@ import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData;
/**
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMa
@InterfaceStability.Evolving
public abstract class StateMachineProcedure
extends Procedure {
- private static final Log LOG = LogFactory.getLog(StateMachineProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StateMachineProcedure.class);
private static final int EOF_STATE = Integer.MIN_VALUE;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
index edfb3adef2..1e9ef6e78d 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
@@ -20,14 +20,14 @@ package org.apache.hadoop.hbase.procedure2.store.wal;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader;
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ProcedureWALFile implements Comparable {
- private static final Log LOG = LogFactory.getLog(ProcedureWALFile.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureWALFile.class);
private ProcedureWALHeader header;
private FSDataInputStream stream;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
index a34afe5afe..84edd0fbcf 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
@@ -25,12 +25,12 @@ import java.io.InputStream;
import java.io.OutputStream;
import java.util.Iterator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class ProcedureWALFormat {
- private static final Log LOG = LogFactory.getLog(ProcedureWALFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureWALFormat.class);
static final byte LOG_TYPE_STREAM = 0;
static final byte LOG_TYPE_COMPACTED = 1;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index 36d8270141..0e110884d8 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -22,11 +22,11 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ProcedureWALFormatReader {
- private static final Log LOG = LogFactory.getLog(ProcedureWALFormatReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureWALFormatReader.class);
// ==============================================================================================
// We read the WALs in reverse order from the newest to the oldest.
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java
index 84cda6526f..b6376aa512 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java
@@ -37,8 +37,6 @@ import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.collections4.queue.CircularFifoQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
@@ -59,6 +57,8 @@ import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -67,7 +67,7 @@ import org.apache.yetus.audience.InterfaceStability;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class WALProcedureStore extends ProcedureStoreBase {
- private static final Log LOG = LogFactory.getLog(WALProcedureStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALProcedureStore.class);
public static final String LOG_PREFIX = "pv2-";
/** Used to construct the name of the log directory for master procedures */
public static final String MASTER_PROCEDURE_LOGDIR = "MasterProcWALs";
@@ -496,7 +496,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize one of the procedure: proc=" + proc +
+ LOG.error("Unable to serialize one of the procedure: proc=" + proc +
", subprocs=" + Arrays.toString(subprocs), e);
throw new RuntimeException(e);
} finally {
@@ -525,7 +525,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize one of the procedure: " + Arrays.toString(procs), e);
+ LOG.error("Unable to serialize one of the procedure: " + Arrays.toString(procs), e);
throw new RuntimeException(e);
} finally {
releaseSlot(slot);
@@ -548,7 +548,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize the procedure: " + proc, e);
+ LOG.error("Unable to serialize the procedure: " + proc, e);
throw new RuntimeException(e);
} finally {
releaseSlot(slot);
@@ -571,7 +571,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize the procedure: " + procId, e);
+ LOG.error("Unable to serialize the procedure: " + procId, e);
throw new RuntimeException(e);
} finally {
releaseSlot(slot);
@@ -596,7 +596,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize the procedure: " + proc, e);
+ LOG.error("Unable to serialize the procedure: " + proc, e);
throw new RuntimeException(e);
} finally {
releaseSlot(slot);
@@ -632,7 +632,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize the procedures: " + Arrays.toString(procIds), e);
+ LOG.error("Unable to serialize the procedures: " + Arrays.toString(procIds), e);
throw new RuntimeException(e);
} finally {
releaseSlot(slot);
@@ -902,7 +902,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
LOG.warn("Unable to roll the log, attempt=" + (i + 1), e);
}
}
- LOG.fatal("Unable to roll the log");
+ LOG.error("Unable to roll the log");
return false;
}
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
index 6e0c02eb2a..2558a31f08 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
@@ -27,8 +27,6 @@ import java.util.ArrayList;
import java.util.Set;
import java.util.concurrent.Callable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -44,9 +42,11 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.hadoop.hbase.util.Threads;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class ProcedureTestingUtility {
- private static final Log LOG = LogFactory.getLog(ProcedureTestingUtility.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureTestingUtility.class);
private ProcedureTestingUtility() {
}
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java
index 4c1611a6c5..79ce73617c 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -32,13 +30,15 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestChildProcedures {
- private static final Log LOG = LogFactory.getLog(TestChildProcedures.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestChildProcedures.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java
index d2b2b7d54d..9588d998da 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -37,12 +35,14 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureEvents {
- private static final Log LOG = LogFactory.getLog(TestProcedureEvents.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureEvents.class);
private TestProcEnv procEnv;
private ProcedureStore procStore;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
index ed6d512df7..ae781cd913 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
@@ -21,9 +21,8 @@ package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Objects;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -36,13 +35,15 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureExecution {
- private static final Log LOG = LogFactory.getLog(TestProcedureExecution.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureExecution.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
private static final Procedure NULL_PROC = null;
@@ -136,7 +137,7 @@ public class TestProcedureExecution {
// subProc1 has a "null" subprocedure which is catched as InvalidArgument
// failed state with 2 execute and 2 rollback
- LOG.info(state);
+ LOG.info(Objects.toString(state));
Procedure> result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
ProcedureTestingUtility.assertIsIllegalArgumentException(result);
@@ -157,7 +158,7 @@ public class TestProcedureExecution {
long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, rootProc);
// successful state, with 3 execute
- LOG.info(state);
+ LOG.info(Objects.toString(state));
Procedure> result = procExecutor.getResult(rootId);
ProcedureTestingUtility.assertProcNotFailed(result);
assertEquals(state.toString(), 3, state.size());
@@ -173,7 +174,7 @@ public class TestProcedureExecution {
long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, rootProc);
// the 3rd proc fail, rollback after 2 successful execution
- LOG.info(state);
+ LOG.info(Objects.toString(state));
Procedure> result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
LOG.info(result.getException().getMessage());
@@ -300,7 +301,7 @@ public class TestProcedureExecution {
long startTime = EnvironmentEdgeManager.currentTime();
long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, proc);
long execTime = EnvironmentEdgeManager.currentTime() - startTime;
- LOG.info(state);
+ LOG.info(Objects.toString(state));
assertTrue("we didn't wait enough execTime=" + execTime, execTime >= PROC_TIMEOUT_MSEC);
Procedure> result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
@@ -316,7 +317,7 @@ public class TestProcedureExecution {
Procedure proc = new TestWaitingProcedure("wproc", state, true);
proc.setTimeout(2500);
long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, proc);
- LOG.info(state);
+ LOG.info(Objects.toString(state));
Procedure> result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
ProcedureTestingUtility.assertIsTimeoutException(result);
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java
index 289987be8b..29a0472a45 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.procedure2;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
@@ -36,12 +34,14 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureExecutor {
- private static final Log LOG = LogFactory.getLog(TestProcedureExecutor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureExecutor.class);
private TestProcEnv procEnv;
private NoopProcedureStore procStore;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java
index 50ccfa60f3..6546ea3c60 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -32,6 +30,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -39,7 +39,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureInMemoryChore {
- private static final Log LOG = LogFactory.getLog(TestProcedureInMemoryChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureInMemoryChore.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java
index 6246629ef9..0550a91abe 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.procedure2;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -30,6 +28,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
@@ -39,7 +39,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureMetrics {
- private static final Log LOG = LogFactory.getLog(TestProcedureMetrics.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureMetrics.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java
index 12a8012ef8..bebfae001a 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -39,6 +37,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -46,7 +46,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureNonce {
- private static final Log LOG = LogFactory.getLog(TestProcedureNonce.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureNonce.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 2;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
index 06f8833a58..8fe56fe00b 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
@@ -39,6 +37,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -46,7 +46,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureRecovery {
- private static final Log LOG = LogFactory.getLog(TestProcedureRecovery.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureRecovery.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
index 12b21847db..20d60ceb45 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -37,6 +35,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -44,7 +44,7 @@ import static org.junit.Assert.fail;
@Category({MasterTests.class, LargeTests.class})
public class TestProcedureReplayOrder {
- private static final Log LOG = LogFactory.getLog(TestProcedureReplayOrder.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureReplayOrder.class);
private static final int NUM_THREADS = 16;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java
index 1c8f1ebb66..6116736d3c 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.ConcurrentSkipListSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -33,6 +31,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -40,7 +40,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, MediumTests.class})
public class TestProcedureSchedulerConcurrency {
- private static final Log LOG = LogFactory.getLog(TestProcedureEvents.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureEvents.class);
private SimpleProcedureScheduler procSched;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java
index 3803abae27..1a426505ea 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java
@@ -25,8 +25,6 @@ import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
@@ -37,10 +35,12 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureSuspended {
- private static final Log LOG = LogFactory.getLog(TestProcedureSuspended.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureSuspended.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
private static final Procedure NULL_PROC = null;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java
index cbe50f2c2d..f304ba7230 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -35,13 +33,15 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestStateMachineProcedure {
- private static final Log LOG = LogFactory.getLog(TestStateMachineProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestStateMachineProcedure.class);
private static final Exception TEST_FAILURE_EXCEPTION = new Exception("test failure") {
@Override
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java
index 017992cfea..202353526a 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java
@@ -24,8 +24,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -37,13 +35,15 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestYieldProcedures {
- private static final Log LOG = LogFactory.getLog(TestYieldProcedures.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestYieldProcedures.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
private static final Procedure NULL_PROC = null;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java
index 550116e2e1..e4766f6f15 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java
@@ -20,13 +20,13 @@ package org.apache.hadoop.hbase.procedure2.store;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker.BitSetNode;
import static org.junit.Assert.assertEquals;
@@ -35,7 +35,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureStoreTracker {
- private static final Log LOG = LogFactory.getLog(TestProcedureStoreTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureStoreTracker.class);
@Test
public void testSeqInsertAndDelete() {
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java
index 98ec1146e7..31c9cf3ee2 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -40,6 +38,8 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.Ignore;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -48,7 +48,7 @@ import static org.junit.Assert.fail;
@Category({MasterTests.class, LargeTests.class})
public class TestStressWALProcedureStore {
- private static final Log LOG = LogFactory.getLog(TestWALProcedureStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedureStore.class);
private static final int PROCEDURE_STORE_SLOTS = 8;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
index 98b1b7c9d6..a7bab8f625 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
@@ -28,8 +28,6 @@ import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
@@ -53,6 +51,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -61,7 +61,7 @@ import static org.junit.Assert.fail;
@Category({MasterTests.class, SmallTests.class})
public class TestWALProcedureStore {
- private static final Log LOG = LogFactory.getLog(TestWALProcedureStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedureStore.class);
private static final int PROCEDURE_STORE_SLOTS = 1;
private static final Procedure NULL_PROC = null;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java
index 019b4567fd..dcb133e9ef 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java
@@ -18,18 +18,18 @@
package org.apache.hadoop.hbase.procedure2.util;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
@Category({MasterTests.class, SmallTests.class})
public class TestDelayedUtil {
- private static final Log LOG = LogFactory.getLog(TestDelayedUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDelayedUtil.class);
@Test
public void testDelayedContainerEquals() {
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index 16766919c5..e71a21bafc 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -169,6 +169,7 @@
junit:junitlog4j:log4jcommons-logging:commons-logging
+ org.slf4j:slf4j-apiorg.apache.yetus:audience-annotationscom.github.stephenc.fingbugs:*
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index a608dffccd..d07a031dfe 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -165,8 +165,8 @@
protobuf-java
- commons-logging
- commons-logging
+ org.slf4j
+ slf4j-api
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
index e8491f7e3d..65f1cc6721 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.util;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.ByteString;
import com.google.protobuf.HBaseZeroCopyByteString;
@@ -29,7 +29,7 @@ import com.google.protobuf.HBaseZeroCopyByteString;
*/
@InterfaceAudience.Private
public class ByteStringer {
- private static final Log LOG = LogFactory.getLog(ByteStringer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ByteStringer.class);
/**
* Flag set at class loading time.
diff --git a/hbase-replication/pom.xml b/hbase-replication/pom.xml
index 0236601b4f..ab221999ad 100644
--- a/hbase-replication/pom.xml
+++ b/hbase-replication/pom.xml
@@ -127,8 +127,8 @@
commons-lang3
- commons-logging
- commons-logging
+ org.slf4j
+ slf4j-apiorg.apache.zookeeper
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
index 214a313631..b221b601e0 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
@@ -25,8 +25,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.TableName;
@@ -40,11 +38,13 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.NodeExistsException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class ReplicationPeerZKImpl extends ReplicationStateZKBase
implements ReplicationPeer, Abortable, Closeable {
- private static final Log LOG = LogFactory.getLog(ReplicationPeerZKImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeerZKImpl.class);
private ReplicationPeerConfig peerConfig;
private final String id;
@@ -187,7 +187,7 @@ public class ReplicationPeerZKImpl extends ReplicationStateZKBase
@Override
public void abort(String why, Throwable e) {
- LOG.fatal("The ReplicationPeer corresponding to peer " + peerConfig
+ LOG.error("The ReplicationPeer corresponding to peer " + peerConfig
+ " was aborted for the following reason(s):" + why, e);
}
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
index 2c3bbd5bbb..ff6519dbd0 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
@@ -28,8 +28,6 @@ import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.CompoundConfiguration;
@@ -47,6 +45,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class provides an implementation of the ReplicationPeers interface using ZooKeeper. The
@@ -82,7 +82,7 @@ public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements Re
private final ReplicationQueuesClient queuesClient;
private Abortable abortable;
- private static final Log LOG = LogFactory.getLog(ReplicationPeersZKImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeersZKImpl.class);
public ReplicationPeersZKImpl(final ZKWatcher zk, final Configuration conf,
final ReplicationQueuesClient queuesClient, Abortable abortable) {
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
index 68b7ebeec9..ecd888f51e 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
@@ -23,10 +23,9 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ServerName;
/**
@@ -36,7 +35,7 @@ import org.apache.hadoop.hbase.ServerName;
*/
@InterfaceAudience.Private
public class ReplicationQueueInfo {
- private static final Log LOG = LogFactory.getLog(ReplicationQueueInfo.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationQueueInfo.class);
private final String peerId;
private final String peerClusterZnode;
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
index b998f15975..e85b42ae0a 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.replication;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
@@ -33,12 +31,14 @@ import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class ReplicationQueuesClientZKImpl extends ReplicationStateZKBase implements
ReplicationQueuesClient {
- Log LOG = LogFactory.getLog(ReplicationQueuesClientZKImpl.class);
+ Logger LOG = LoggerFactory.getLogger(ReplicationQueuesClientZKImpl.class);
public ReplicationQueuesClientZKImpl(ReplicationQueuesClientArguments args) {
this(args.getZk(), args.getConf(), args.getAbortable());
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
index 95fd29430c..7551cb7660 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
@@ -23,8 +23,6 @@ import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Abortable;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class provides an implementation of the
@@ -67,7 +67,7 @@ public class ReplicationQueuesZKImpl extends ReplicationStateZKBase implements R
/** Znode containing all replication queues for this region server. */
private String myQueuesZnode;
- private static final Log LOG = LogFactory.getLog(ReplicationQueuesZKImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationQueuesZKImpl.class);
public ReplicationQueuesZKImpl(ReplicationQueuesArguments args) {
this(args.getZk(), args.getConf(), args.getAbortable());
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
index 300a93b602..9a1d9aaefb 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
@@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class is a ZooKeeper implementation of the ReplicationTracker interface. This class is
@@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Private
public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements ReplicationTracker {
- private static final Log LOG = LogFactory.getLog(ReplicationTrackerZKImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationTrackerZKImpl.class);
// All about stopping
private final Stoppable stopper;
// listeners to be notified
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
index 546464344d..b6c849c735 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.replication;
import org.apache.commons.lang3.NotImplementedException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
@@ -59,7 +59,7 @@ import java.util.TreeSet;
public class TableBasedReplicationQueuesImpl extends ReplicationTableBase
implements ReplicationQueues {
- private static final Log LOG = LogFactory.getLog(TableBasedReplicationQueuesImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableBasedReplicationQueuesImpl.class);
// Common byte values used in replication offset tracking
private static final byte[] INITIAL_OFFSET_BYTES = Bytes.toBytes(0L);
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index e7dc864bd1..d1036f2d8e 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -269,8 +269,8 @@
commons-lang3
- commons-logging
- commons-logging
+ org.slf4j
+ slf4j-api
javax.xml.bind
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index 921b17c97c..9f353aab6c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -27,9 +27,9 @@ import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.rest.model.CellModel;
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.rest.model.RowModel;
@InterfaceAudience.Private
public class MultiRowResource extends ResourceBase implements Constants {
- private static final Log LOG = LogFactory.getLog(MultiRowResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiRowResource.class);
TableResource tableResource;
Integer versions = null;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
index 4faf1d18eb..3ff25f99ef 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
@@ -34,11 +34,11 @@ import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.rest.model.NamespacesInstanceModel;
import org.apache.hadoop.hbase.rest.model.TableListModel;
@@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.rest.model.TableModel;
@InterfaceAudience.Private
public class NamespacesInstanceResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(NamespacesInstanceResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NamespacesInstanceResource.class);
String namespace;
boolean queryTables = false;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
index 4c5390aa6f..fe48bafd47 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
@@ -30,9 +30,9 @@ import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.rest.model.NamespacesModel;
/**
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.rest.model.NamespacesModel;
@InterfaceAudience.Private
public class NamespacesResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(NamespacesResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NamespacesResource.class);
/**
* Constructor
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
index b06704496d..5ea8a316d3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
@@ -24,8 +24,6 @@ import java.util.List;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.StreamingOutput;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
@@ -34,9 +32,11 @@ import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class ProtobufStreamingOutput implements StreamingOutput {
- private static final Log LOG = LogFactory.getLog(ProtobufStreamingOutput.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProtobufStreamingOutput.class);
private String contentType;
private ResultScanner resultScanner;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index 360ab9d5e6..fecd1bdbc7 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -32,8 +32,6 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -68,6 +66,8 @@ import org.eclipse.jetty.servlet.FilterHolder;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import javax.servlet.DispatcherType;
@@ -82,7 +82,7 @@ import javax.servlet.DispatcherType;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class RESTServer implements Constants {
- static Log LOG = LogFactory.getLog("RESTServer");
+ static Logger LOG = LoggerFactory.getLogger("RESTServer");
static String REST_CSRF_ENABLED_KEY = "hbase.rest.csrf.enabled";
static boolean REST_CSRF_ENABLED_DEFAULT = false;
@@ -356,7 +356,7 @@ public class RESTServer implements Constants {
server.start();
server.join();
} catch (Exception e) {
- LOG.fatal("Failed to start server", e);
+ LOG.error("Failed to start server", e);
System.exit(1);
}
LOG.info("***** STOPPING service '" + RESTServer.class.getSimpleName() + "' *****");
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 1e5d4a98c5..b2fa16dde2 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.ParseFilter;
@@ -38,7 +38,7 @@ import org.apache.hadoop.security.authorize.ProxyUsers;
*/
@InterfaceAudience.Private
public class RESTServlet implements Constants {
- private static final Log LOG = LogFactory.getLog(RESTServlet.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RESTServlet.class);
private static RESTServlet INSTANCE;
private final Configuration conf;
private final MetricsREST metrics;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
index 183262d2d2..1e0f7beb9a 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
@@ -29,8 +29,6 @@ import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -41,10 +39,12 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.rest.model.TableInfoModel;
import org.apache.hadoop.hbase.rest.model.TableRegionModel;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class RegionsResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(RegionsResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionsResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
index d2ddb0d76c..98217451c5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
@@ -31,10 +31,9 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.rest.model.TableListModel;
import org.apache.hadoop.hbase.rest.model.TableModel;
@@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.rest.model.TableModel;
@Path("/")
@InterfaceAudience.Private
public class RootResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(RootResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RootResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index dead804ca2..e06567b007 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -37,12 +37,12 @@ import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
@@ -59,7 +59,7 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public class RowResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(RowResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RowResource.class);
private static final String CHECK_PUT = "put";
private static final String CHECK_DELETE = "delete";
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
index 1edd73a063..9571c82d1b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
@@ -23,10 +23,10 @@ import java.io.IOException;
import java.util.Iterator;
import java.util.NoSuchElementException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.client.Get;
@@ -37,7 +37,7 @@ import org.apache.hadoop.util.StringUtils;
@InterfaceAudience.Private
public class RowResultGenerator extends ResultGenerator {
- private static final Log LOG = LogFactory.getLog(RowResultGenerator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RowResultGenerator.class);
private Iterator valuesI;
private Cell cache;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
index 8f5611589b..b3e3985423 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
@@ -31,12 +31,12 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
@@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public class ScannerInstanceResource extends ResourceBase {
- private static final Log LOG =
- LogFactory.getLog(ScannerInstanceResource.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ScannerInstanceResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
index 60b348ee7f..d2b173fa0c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
@@ -35,10 +35,9 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.rest.model.ScannerModel;
@@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.rest.model.ScannerModel;
@InterfaceAudience.Private
public class ScannerResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(ScannerResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ScannerResource.class);
static final Map scanners =
Collections.synchronizedMap(new HashMap());
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
index ece4f1249b..b622fede6b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
@@ -22,14 +22,14 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.Iterator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
@@ -42,8 +42,8 @@ import org.apache.hadoop.util.StringUtils;
@InterfaceAudience.Private
public class ScannerResultGenerator extends ResultGenerator {
- private static final Log LOG =
- LogFactory.getLog(ScannerResultGenerator.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ScannerResultGenerator.class);
public static Filter buildFilterFromModel(final ScannerModel model)
throws Exception {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
index 8ce59eb483..e617cd4426 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
@@ -35,8 +35,6 @@ import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import javax.xml.namespace.QName;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableExistsException;
@@ -44,6 +42,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.rest.model.ColumnSchemaModel;
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
@InterfaceAudience.Private
public class SchemaResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(SchemaResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SchemaResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
index 460f86a68a..90ebccb47e 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
@@ -30,10 +30,9 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.RegionLoad;
@@ -43,8 +42,8 @@ import org.apache.hadoop.hbase.rest.model.StorageClusterStatusModel;
@InterfaceAudience.Private
public class StorageClusterStatusResource extends ResourceBase {
- private static final Log LOG =
- LogFactory.getLog(StorageClusterStatusResource.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(StorageClusterStatusResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
index 42f531cc7c..3d70410a40 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
@@ -30,17 +30,16 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ClusterStatus.Option;
import org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel;
@InterfaceAudience.Private
public class StorageClusterVersionResource extends ResourceBase {
- private static final Log LOG =
- LogFactory.getLog(StorageClusterVersionResource.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(StorageClusterVersionResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index b32db7f19c..b52b91ba10 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -21,21 +21,16 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.List;
-
import javax.ws.rs.DefaultValue;
import javax.ws.rs.Encoded;
-import javax.ws.rs.HeaderParam;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.UriInfo;
-
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter;
@@ -48,7 +43,7 @@ import org.apache.hadoop.hbase.util.Bytes;
public class TableResource extends ResourceBase {
String table;
- private static final Log LOG = LogFactory.getLog(TableResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableResource.class);
/**
* Constructor
@@ -206,7 +201,7 @@ public class TableResource extends ResourceBase {
} catch (IOException exp) {
servlet.getMetrics().incrementFailedScanRequests(1);
processException(exp);
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
return null;
}
}
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
index f8b959331d..a30658c244 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
@@ -22,7 +22,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.Produces;
@@ -36,9 +35,9 @@ import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
@@ -51,7 +50,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
@InterfaceAudience.Private
public class TableScanResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(TableScanResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableScanResource.class);
TableResource tableResource;
ResultScanner results;
@@ -126,7 +125,7 @@ public class TableScanResource extends ResourceBase {
} catch (Exception exp) {
servlet.getMetrics().incrementFailedScanRequests(1);
processException(exp);
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
return null;
}
}
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
index 3847840893..c212334153 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
@@ -31,10 +31,9 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.rest.model.VersionModel;
/**
@@ -47,7 +46,7 @@ import org.apache.hadoop.hbase.rest.model.VersionModel;
@InterfaceAudience.Private
public class VersionResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(VersionResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VersionResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
index c756a79534..d8cf5f4a11 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
@@ -29,9 +29,9 @@ import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
@@ -55,7 +55,7 @@ import org.apache.http.util.EntityUtils;
public class Client {
public static final Header[] EMPTY_HEADER_ARRAY = new Header[0];
- private static final Log LOG = LogFactory.getLog(Client.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Client.class);
private HttpClient httpClient;
private Cluster cluster;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
index 71001b0fc0..bb48243ad7 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
@@ -23,8 +23,7 @@ import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -35,6 +34,8 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
@@ -84,7 +85,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Public
public class RemoteHTable implements Table {
- private static final Log LOG = LogFactory.getLog(RemoteHTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RemoteHTable.class);
final Client client;
final Configuration conf;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
index 8f68f664ba..adffc126b7 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
@@ -22,9 +22,9 @@ package org.apache.hadoop.hbase.rest.client;
import java.io.IOException;
import java.io.InputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
@@ -33,7 +33,7 @@ import org.apache.http.HttpResponse;
*/
@InterfaceAudience.Public
public class Response {
- private static final Log LOG = LogFactory.getLog(Response.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Response.class);
private int code;
private Header[] headers;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
index f051bc8212..5dfa58caec 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
@@ -28,17 +28,17 @@ import java.util.Properties;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.DNS;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class AuthFilter extends AuthenticationFilter {
- private static final Log LOG = LogFactory.getLog(AuthFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AuthFilter.class);
private static final String REST_PREFIX = "hbase.rest.authentication.";
private static final int REST_PREFIX_LEN = REST_PREFIX.length();
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
index 76dc70e737..31a437a178 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
@@ -34,10 +34,10 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
/**
@@ -50,8 +50,8 @@ import org.apache.hadoop.conf.Configuration;
@InterfaceAudience.Public
public class RestCsrfPreventionFilter implements Filter {
- private static final Log LOG =
- LogFactory.getLog(RestCsrfPreventionFilter.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(RestCsrfPreventionFilter.class);
public static final String HEADER_USER_AGENT = "User-Agent";
public static final String BROWSER_USER_AGENT_PARAM =
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index 4483bdbe50..882bd983d0 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -32,9 +32,9 @@ import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.Provider;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
@@ -47,8 +47,8 @@ import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
@InterfaceAudience.Private
public class ProtobufMessageBodyConsumer
implements MessageBodyReader<ProtobufMessageHandler> {
- private static final Log LOG =
- LogFactory.getLog(ProtobufMessageBodyConsumer.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ProtobufMessageBodyConsumer.class);
@Override
public boolean isReadable(Class<?> type, Type genericType,
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java
index 7ad162431a..5af8ee2bfa 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java
@@ -28,11 +28,11 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class DummyFilter implements Filter {
- private static final Log LOG = LogFactory.getLog(DummyFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DummyFilter.class);
@Override
public void destroy() {
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
index 4cce21b369..273010a334 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.rest;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.http.HttpServerUtil;
@@ -36,6 +34,8 @@ import org.eclipse.jetty.servlet.ServletHolder;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import javax.servlet.DispatcherType;
import java.util.Arrays;
@@ -43,7 +43,7 @@ import java.util.EnumSet;
public class HBaseRESTTestingUtility {
- private static final Log LOG = LogFactory.getLog(HBaseRESTTestingUtility.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseRESTTestingUtility.class);
private int testServletPort;
private Server server;
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
index 476594e080..8dc8cdd105 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
@@ -34,8 +34,6 @@ import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -94,6 +92,8 @@ import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Script used evaluating Stargate performance and scalability. Runs a SG
@@ -112,7 +112,7 @@ import org.apache.hadoop.util.ToolRunner;
* runs an individual client. Each client does about 1GB of data.
*/
public class PerformanceEvaluation extends Configured implements Tool {
- protected static final Log LOG = LogFactory.getLog(PerformanceEvaluation.class.getName());
+ protected static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
private static final int DEFAULT_ROW_PREFIX_LENGTH = 16;
private static final int ROW_LENGTH = 1000;
@@ -1214,7 +1214,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
}
static class FilteredScanTest extends TableTest {
- protected static final Log LOG = LogFactory.getLog(FilteredScanTest.class.getName());
+ protected static final Logger LOG = LoggerFactory.getLogger(FilteredScanTest.class.getName());
FilteredScanTest(Configuration conf, TestOptions options, Status status) {
super(conf, options, status);
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
index 2b2e5e3700..d62fba58ba 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
@@ -38,8 +38,6 @@ import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import org.apache.http.Header;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -66,10 +64,12 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
public class TestScannerResource {
- private static final Log LOG = LogFactory.getLog(TestScannerResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestScannerResource.class);
private static final TableName TABLE = TableName.valueOf("TestScannerResource");
private static final TableName TABLE_TO_BE_DISABLED = TableName.valueOf("ScannerResourceDisable");
private static final String NONEXISTENT_TABLE = "ThisTableDoesNotExist";
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
index c8bbc24149..614b1a10ef 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
@@ -34,8 +34,6 @@ import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -79,11 +77,13 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
public class TestScannersWithFilters {
- private static final Log LOG = LogFactory.getLog(TestScannersWithFilters.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestScannersWithFilters.class);
private static final TableName TABLE = TableName.valueOf("TestScannersWithFilters");
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
index 4866d53c2f..04b23fa59f 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
@@ -26,8 +26,6 @@ import java.util.Collection;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.http.Header;
import org.apache.http.message.BasicHeader;
@@ -55,11 +53,13 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
@RunWith(Parameterized.class)
public class TestSchemaResource {
- private static final Log LOG = LogFactory.getLog(TestSchemaResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSchemaResource.class);
private static String TABLE1 = "TestSchemaResource1";
private static String TABLE2 = "TestSchemaResource2";
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java
index ca3b82f4ef..e86a4f8c0a 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java
@@ -24,8 +24,6 @@ import java.io.IOException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -44,10 +42,12 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
public class TestStatusResource {
- private static final Log LOG = LogFactory.getLog(TestStatusResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestStatusResource.class);
private static final byte[] META_REGION_NAME = Bytes.toBytes(TableName.META_TABLE_NAME + ",,1");
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
index 26891774b7..55913499ef 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
@@ -32,8 +32,6 @@ import java.util.List;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -64,10 +62,12 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
public class TestTableResource {
- private static final Log LOG = LogFactory.getLog(TestTableResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableResource.class);
private static TableName TABLE = TableName.valueOf("TestTableResource");
private static String COLUMN_FAMILY = "test";
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
index 8380a0a4fb..a10fef013c 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
@@ -43,8 +43,6 @@ import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.stream.XMLStreamException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -67,6 +65,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
@@ -78,7 +78,7 @@ import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
@Category({RestTests.class, MediumTests.class})
public class TestTableScan {
- private static final Log LOG = LogFactory.getLog(TestTableScan.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableScan.class);
private static final TableName TABLE = TableName.valueOf("TestScanResource");
private static final String CFA = "a";
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
index 1f927f597e..50cb0854b3 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
@@ -25,8 +25,6 @@ import javax.ws.rs.core.MediaType;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
@@ -48,10 +46,12 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
public class TestVersionResource {
- private static final Log LOG = LogFactory.getLog(TestVersionResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestVersionResource.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final HBaseRESTTestingUtility REST_TEST_UTIL =
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java
index 586e33c183..cf5519e7a7 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java
@@ -25,8 +25,6 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import javax.xml.bind.UnmarshalException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel;
@@ -34,13 +32,15 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.util.StringUtils;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test class for {@link RemoteAdmin} to verify XML is parsed in a certain manner.
*/
@Category(SmallTests.class)
public class TestXmlParsing {
- private static final Log LOG = LogFactory.getLog(TestXmlParsing.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestXmlParsing.class);
@Test
public void testParsingClusterVersion() throws Exception {
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
index c41128d737..81de4361bb 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
@@ -23,16 +23,16 @@ import java.util.Iterator;
import javax.xml.bind.JAXBContext;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.RestTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, SmallTests.class})
public class TestTableSchemaModel extends TestModelBase<TableSchemaModel> {
- private static final Log LOG = LogFactory.getLog(TestTableSchemaModel.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableSchemaModel.class);
public static final String TABLE_NAME = "testTable";
private static final boolean IS_META = false;
diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml
index c53dc953e5..dba581046e 100644
--- a/hbase-rsgroup/pom.xml
+++ b/hbase-rsgroup/pom.xml
@@ -129,8 +129,8 @@
<artifactId>commons-lang3</artifactId>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
<groupId>org.apache.hbase.thirdparty</groupId>
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
index e332f5ccb0..75588408e4 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
@@ -29,8 +29,7 @@ import java.util.stream.Collectors;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -77,12 +76,14 @@ import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveServe
import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveServersResponse;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// TODO: Encapsulate MasterObserver functions into separate subclass.
@CoreCoprocessor
@InterfaceAudience.Private
public class RSGroupAdminEndpoint implements MasterCoprocessor, MasterObserver {
- private static final Log LOG = LogFactory.getLog(RSGroupAdminEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RSGroupAdminEndpoint.class);
private MasterServices master = null;
// Only instance of RSGroupInfoManager. RSGroup aware load balancers ask for this instance on
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
index 45421e325b..b4d35e306c 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
@@ -29,8 +29,6 @@ import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -48,13 +46,15 @@ import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Service to support Region Server Grouping (HBase-6721).
*/
@InterfaceAudience.Private
public class RSGroupAdminServer implements RSGroupAdmin {
- private static final Log LOG = LogFactory.getLog(RSGroupAdminServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RSGroupAdminServer.class);
private MasterServices master;
private final RSGroupInfoManager rsGroupInfoManager;
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
index d838edb94c..60af99321e 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
@@ -29,8 +29,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseIOException;
@@ -52,6 +51,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* GroupBasedLoadBalancer, used when Region Server Grouping is configured (HBase-6721)
@@ -69,7 +70,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class RSGroupBasedLoadBalancer implements RSGroupableBalancer {
- private static final Log LOG = LogFactory.getLog(RSGroupBasedLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RSGroupBasedLoadBalancer.class);
private Configuration config;
private ClusterStatus clusterStatus;
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
index 4d8ff92609..67dfde729f 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
@@ -34,8 +34,6 @@ import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -81,7 +79,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@@ -116,7 +115,7 @@ import com.google.protobuf.ServiceException;
*/
@InterfaceAudience.Private
class RSGroupInfoManagerImpl implements RSGroupInfoManager {
- private static final Log LOG = LogFactory.getLog(RSGroupInfoManagerImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RSGroupInfoManagerImpl.class);
/** Table descriptor for hbase:rsgroup catalog table */
private final static HTableDescriptor RSGROUP_TABLE_DESC;
@@ -624,7 +623,7 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager {
* done asynchronously in this thread.
*/
private class ServerEventsListenerThread extends Thread implements ServerListener {
- private final Log LOG = LogFactory.getLog(ServerEventsListenerThread.class);
+ private final Logger LOG = LoggerFactory.getLogger(ServerEventsListenerThread.class);
private boolean changed = false;
ServerEventsListenerThread() {
@@ -738,7 +737,7 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager {
}
private class RSGroupStartupWorker extends Thread {
- private final Log LOG = LogFactory.getLog(RSGroupStartupWorker.class);
+ private final Logger LOG = LoggerFactory.getLogger(RSGroupStartupWorker.class);
private volatile boolean online = false;
RSGroupStartupWorker() {
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java
index 5ce0c09f67..797022c169 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java
@@ -36,8 +36,6 @@ import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -63,7 +61,8 @@ import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -71,7 +70,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category(SmallTests.class)
public class TestRSGroupBasedLoadBalancer {
- private static final Log LOG = LogFactory.getLog(TestRSGroupBasedLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRSGroupBasedLoadBalancer.class);
private static RSGroupBasedLoadBalancer loadBalancer;
private static SecureRandom rand;
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
index f2ae112c60..378c1ab0a9 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.Iterator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -53,12 +51,13 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@Category({MediumTests.class})
public class TestRSGroups extends TestRSGroupsBase {
- protected static final Log LOG = LogFactory.getLog(TestRSGroups.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(TestRSGroups.class);
private static HMaster master;
private static boolean INIT = false;
private static RSGroupAdminEndpoint rsGroupAdminEndpoint;
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
index bbcf1206af..cd8c3869e5 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
@@ -34,8 +34,6 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.ClusterStatus.Option;
import org.apache.hadoop.hbase.HBaseCluster;
@@ -59,7 +57,8 @@ import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest;
public abstract class TestRSGroupsBase {
- protected static final Log LOG = LogFactory.getLog(TestRSGroupsBase.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(TestRSGroupsBase.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
index 6f7b47d554..8b4f195bf4 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.rsgroup;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseCluster;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -41,7 +39,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -56,7 +55,7 @@ import static org.junit.Assert.assertFalse;
// assignment with a timeout.
@Category(MediumTests.class)
public class TestRSGroupsOfflineMode {
- private static final Log LOG = LogFactory.getLog(TestRSGroupsOfflineMode.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRSGroupsOfflineMode.class);
private static HMaster master;
private static Admin hbaseAdmin;
private static HBaseTestingUtility TEST_UTIL;
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 52e8a522b7..9479b09856 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -492,8 +492,8 @@
commons-lang3
- commons-logging
- commons-logging
+ org.slf4j
+ slf4j-apiorg.apache.commons
@@ -519,6 +519,11 @@
javax.ws.rsjavax.ws.rs-api
+
+ org.powermock
+ powermock-core
+ 1.7.3
+
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
index 5f3531d2e0..8ab139f63c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
@@ -17,17 +17,17 @@
*/
package org.apache.hadoop.hbase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HealthChecker.HealthCheckerExitStatus;
import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The Class HealthCheckChore for running health checker regularly.
*/
public class HealthCheckChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(HealthCheckChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HealthCheckChore.class);
private HealthChecker healthChecker;
private Configuration config;
private int threshold;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java
index 45e0f3aff9..a43a51d865 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A utility for executing an external script that checks the health of
@@ -33,7 +33,7 @@ import org.apache.hadoop.util.Shell.ShellCommandExecutor;
*/
class HealthChecker {
- private static final Log LOG = LogFactory.getLog(HealthChecker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HealthChecker.class);
private ShellCommandExecutor shexec = null;
private String exceptionStackTrace;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
index 1472057027..6fdc77e34d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
@@ -19,11 +19,12 @@
package org.apache.hadoop.hbase;
import com.google.protobuf.Service;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.management.ManagementFactory;
@@ -49,7 +50,7 @@ import javax.management.remote.rmi.RMIConnectorServer;
* 3)support subset of SSL (with default configuration)
*/
public class JMXListener implements MasterCoprocessor, RegionServerCoprocessor {
- private static final Log LOG = LogFactory.getLog(JMXListener.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JMXListener.class);
public static final String RMI_REGISTRY_PORT_CONF_KEY = ".rmi.registry.port";
public static final String RMI_CONNECTOR_PORT_CONF_KEY = ".rmi.connector.port";
public static final int defMasterRMIRegistryPort = 10101;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
index e43d33bb0e..06199f72d6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
@@ -24,9 +24,9 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
@@ -58,7 +58,7 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil;
*/
@InterfaceAudience.Public
public class LocalHBaseCluster {
- private static final Log LOG = LogFactory.getLog(LocalHBaseCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LocalHBaseCluster.class);
private final List masterThreads = new CopyOnWriteArrayList<>();
private final List regionThreads = new CopyOnWriteArrayList<>();
private final static int DEFAULT_NO = 1;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java
index de21ce082d..11d26d9514 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
@@ -49,7 +49,7 @@ import java.util.concurrent.ConcurrentSkipListMap;
*/
@InterfaceAudience.Private
public class ZKNamespaceManager extends ZKListener {
- private static final Log LOG = LogFactory.getLog(ZKNamespaceManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKNamespaceManager.class);
private final String nsZNode;
private volatile NavigableMap cache;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java
index cda5affac4..3e911a8dd6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java
@@ -26,8 +26,6 @@ import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
*
Contains a set of methods for the collaboration between the start/stop scripts and the
@@ -49,7 +49,7 @@ import org.apache.zookeeper.KeeperException;
* check its content to make sure that the backup server is not now in charge.
*/
public class ZNodeClearer {
- private static final Log LOG = LogFactory.getLog(ZNodeClearer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZNodeClearer.class);
private ZNodeClearer() {}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
index 4da1235fd4..354a63caa6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
@@ -25,8 +25,6 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.io.MultipleIOException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Collections2;
@@ -54,7 +53,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
@InterfaceAudience.Private
public class HFileArchiver {
- private static final Log LOG = LogFactory.getLog(HFileArchiver.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class);
private static final String SEPARATOR = ".";
/** Number of retries in case of fs operation failure */
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
index 389dea7fc0..c51d4937a1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.backup.example;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Client-side manager for which table's hfiles should be preserved for long-term archive.
@@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException;
class HFileArchiveManager {
private final String archiveZnode;
- private static final Log LOG = LogFactory.getLog(HFileArchiveManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileArchiveManager.class);
private final ZKWatcher zooKeeper;
private volatile boolean stopped = false;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java
index 3a1653417d..93c9690d79 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java
@@ -21,8 +21,8 @@ import java.util.List;
import java.util.Set;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Monitor the actual tables for which HFiles are archived for long-term retention (always kept
@@ -31,7 +31,7 @@ import org.apache.commons.logging.LogFactory;
* It is internally synchronized to ensure consistent view of the table state.
*/
public class HFileArchiveTableMonitor {
- private static final Log LOG = LogFactory.getLog(HFileArchiveTableMonitor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileArchiveTableMonitor.class);
private final Set archivedTables = new TreeSet<>();
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
index ff7a51d0a8..484ff5ea17 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.backup.example;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.master.cleaner.BaseHFileCleanerDelegate;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* {@link BaseHFileCleanerDelegate} that only cleans HFiles that don't belong to a table that is
@@ -44,7 +44,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class LongTermArchivingHFileCleaner extends BaseHFileCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(LongTermArchivingHFileCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LongTermArchivingHFileCleaner.class);
TableHFileArchiveTracker archiveTracker;
private FileSystem fs;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java
index 73b50a652d..1b3b775b2a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.backup.example;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
@@ -29,6 +27,8 @@ import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Track HFile archiving state changes in ZooKeeper. Keeps track of the tables whose HFiles should
@@ -39,7 +39,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class TableHFileArchiveTracker extends ZKListener {
- private static final Log LOG = LogFactory.getLog(TableHFileArchiveTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableHFileArchiveTracker.class);
public static final String HFILE_ARCHIVE_ZNODE_PARENT = "hfilearchive";
private HFileArchiveTableMonitor monitor;
private String archiveHFileZNode;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
index 529a2f93d3..7a1a57814f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A client scanner for a region opened for read-only on the client side. Assumes region data
@@ -42,7 +42,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class ClientSideRegionScanner extends AbstractClientScanner {
- private static final Log LOG = LogFactory.getLog(ClientSideRegionScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClientSideRegionScanner.class);
private HRegion region;
RegionScanner scanner;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java
index 36b2bb282d..93b1a4024c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java
@@ -24,8 +24,6 @@ import java.util.Collections;
import java.util.List;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A Scanner which performs a scan over snapshot files. Using this class requires copying the
@@ -65,7 +65,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class TableSnapshotScanner extends AbstractClientScanner {
- private static final Log LOG = LogFactory.getLog(TableSnapshotScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableSnapshotScanner.class);
private Configuration conf;
private String snapshotName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java
index 94a573c638..61a0238bca 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java
@@ -23,11 +23,11 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;
@@ -81,7 +81,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Public
public class EntityLock {
- private static final Log LOG = LogFactory.getLog(EntityLock.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EntityLock.class);
public static final String HEARTBEAT_TIME_BUFFER =
"hbase.client.locks.heartbeat.time.buffer.ms";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java
index 1fa70f48ce..2bbb90bbf6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java
@@ -17,11 +17,11 @@
*/
package org.apache.hadoop.hbase.conf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.Set;
@@ -73,7 +73,7 @@ import java.util.WeakHashMap;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ConfigurationManager {
- private static final Log LOG = LogFactory.getLog(ConfigurationManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ConfigurationManager.class);
// The set of Configuration Observers. These classes would like to get
// notified when the configuration is reloaded from disk. This is a set
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
index 582fabf4eb..6aa5d977b6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
@@ -22,9 +22,9 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.client.Put;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.wal.WALEdit;
@InterfaceAudience.Private
public class ConstraintProcessor implements RegionCoprocessor, RegionObserver {
- private static final Log LOG = LogFactory.getLog(ConstraintProcessor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ConstraintProcessor.class);
private final ClassLoader classloader;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
index e675cc9c4a..426e516153 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
@@ -29,9 +29,9 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
@@ -53,7 +53,7 @@ public final class Constraints {
private Constraints() {
}
- private static final Log LOG = LogFactory.getLog(Constraints.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Constraints.class);
private static final String CONSTRAINT_HTD_KEY_PREFIX = "constraint $";
private static final Pattern CONSTRAINT_HTD_ATTR_KEY_PATTERN = Pattern
.compile(CONSTRAINT_HTD_KEY_PREFIX, Pattern.LITERAL);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java
index 8a07b4b7c1..c4be4b79d5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java
@@ -29,8 +29,6 @@ import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
@@ -57,7 +55,8 @@ import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.NoNodeException;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.data.Stat;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -72,7 +71,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
public static final int DEFAULT_ZK_RETRIES = 3;
public static final int DEFAULT_MAX_RESUBMIT = 3;
- private static final Log LOG = LogFactory.getLog(SplitLogManagerCoordination.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SplitLogManagerCoordination.class);
private final TaskFinisher taskFinisher;
private final Configuration conf;
@@ -301,7 +300,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
}
private void createRescanFailure() {
- LOG.fatal("logic failure, rescan failure must not happen");
+ LOG.error("logic failure, rescan failure must not happen");
}
/**
@@ -353,7 +352,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
return;
}
SplitLogCounters.tot_mgr_null_data.increment();
- LOG.fatal("logic error - got null data " + path);
+ LOG.error("logic error - got null data " + path);
setDone(path, FAILURE);
return;
}
@@ -382,7 +381,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
LOG.info("task " + path + " entered state: " + slt.toString());
resubmitOrFail(path, CHECK);
} else {
- LOG.fatal("logic error - unexpected zk state for path = " + path + " data = "
+ LOG.error("logic error - unexpected zk state for path = " + path + " data = "
+ slt.toString());
setDone(path, FAILURE);
}
@@ -573,7 +572,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
* Asynchronous handler for zk create node results. Retries on failures.
*/
public class CreateAsyncCallback implements AsyncCallback.StringCallback {
- private final Log LOG = LogFactory.getLog(CreateAsyncCallback.class);
+ private final Logger LOG = LoggerFactory.getLogger(CreateAsyncCallback.class);
@Override
public void processResult(int rc, String path, Object ctx, String name) {
@@ -614,7 +613,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
* Asynchronous handler for zk get-data-set-watch on node results. Retries on failures.
*/
public class GetDataAsyncCallback implements AsyncCallback.DataCallback {
- private final Log LOG = LogFactory.getLog(GetDataAsyncCallback.class);
+ private final Logger LOG = LoggerFactory.getLogger(GetDataAsyncCallback.class);
@Override
public void processResult(int rc, String path, Object ctx, byte[] data, Stat stat) {
@@ -662,7 +661,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
* Asynchronous handler for zk delete node results. Retries on failures.
*/
public class DeleteAsyncCallback implements AsyncCallback.VoidCallback {
- private final Log LOG = LogFactory.getLog(DeleteAsyncCallback.class);
+ private final Logger LOG = LoggerFactory.getLogger(DeleteAsyncCallback.class);
@Override
public void processResult(int rc, String path, Object ctx) {
@@ -704,7 +703,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
* {@link org.apache.hadoop.hbase.regionserver.SplitLogWorker}s to rescan for new tasks.
*/
public class CreateRescanAsyncCallback implements AsyncCallback.StringCallback {
- private final Log LOG = LogFactory.getLog(CreateRescanAsyncCallback.class);
+ private final Logger LOG = LoggerFactory.getLogger(CreateRescanAsyncCallback.class);
@Override
public void processResult(int rc, String path, Object ctx, String name) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java
index 0540a8f858..d1a959951a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java
@@ -26,8 +26,6 @@ import java.util.concurrent.atomic.LongAdder;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.mutable.MutableInt;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -54,6 +52,8 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.AsyncCallback;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* ZooKeeper based implementation of {@link SplitLogWorkerCoordination}
@@ -64,7 +64,7 @@ import org.apache.zookeeper.data.Stat;
public class ZkSplitLogWorkerCoordination extends ZKListener implements
SplitLogWorkerCoordination {
- private static final Log LOG = LogFactory.getLog(ZkSplitLogWorkerCoordination.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZkSplitLogWorkerCoordination.class);
private static final int checkInterval = 5000; // 5 seconds
private static final int FAILED_TO_OWN_TASK = -1;
@@ -539,7 +539,7 @@ public class ZkSplitLogWorkerCoordination extends ZKListener implements
* Asynchronous handler for zk get-data-set-watch on node results.
*/
class GetDataAsyncCallback implements AsyncCallback.DataCallback {
- private final Log LOG = LogFactory.getLog(GetDataAsyncCallback.class);
+ private final Logger LOG = LoggerFactory.getLogger(GetDataAsyncCallback.class);
@Override
public void processResult(int rc, String path, Object ctx, byte[] data, Stat stat) {
@@ -580,7 +580,7 @@ public class ZkSplitLogWorkerCoordination extends ZKListener implements
LOG.warn("transisition task " + task + " to " + slt + " failed because of version mismatch",
bve);
} catch (KeeperException.NoNodeException e) {
- LOG.fatal(
+ LOG.error(
"logic error - end task " + task + " " + slt + " failed because task doesn't exist", e);
} catch (KeeperException e) {
LOG.warn("failed to end task, " + task + " " + slt, e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
index 9f5ca231f5..2818dcd675 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
@@ -19,13 +19,13 @@
package org.apache.hadoop.hbase.coprocessor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
@@ -34,7 +34,7 @@ import java.io.IOException;
*/
@InterfaceAudience.Private
public class BaseEnvironment implements CoprocessorEnvironment {
- private static final Log LOG = LogFactory.getLog(BaseEnvironment.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BaseEnvironment.class);
/** The coprocessor */
public C impl;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
index f2d9b2a877..9489d69dac 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
@@ -32,9 +32,9 @@ import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Abortable;
@@ -75,7 +75,7 @@ public abstract class CoprocessorHost<C extends Coprocessor, E extends CoprocessorEnvironment<C>> {
- private static final Log LOG = LogFactory.getLog(CoprocessorHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CoprocessorHost.class);
protected SortedList<E> coprocEnvironments =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java
index 75117fd6e8..87f7c00064 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.errorhandling;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The dispatcher acts as the state holding entity for foreign error handling. The first
@@ -40,7 +40,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class ForeignExceptionDispatcher implements ForeignExceptionListener, ForeignExceptionSnare {
- private static final Log LOG = LogFactory.getLog(ForeignExceptionDispatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ForeignExceptionDispatcher.class);
protected final String name;
protected final List<ForeignExceptionListener> listeners = new ArrayList<>();
private ForeignException exception;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java
index 294e108ebd..36182d677d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.errorhandling;
import java.util.Timer;
import java.util.TimerTask;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
/**
@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.Private
public class TimeoutExceptionInjector {
- private static final Log LOG = LogFactory.getLog(TimeoutExceptionInjector.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TimeoutExceptionInjector.class);
private final long maxTime;
private volatile boolean complete;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
index 1056c20453..eb94744299 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
@@ -21,14 +21,14 @@ package org.apache.hadoop.hbase.executor;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.htrace.core.Span;
import org.apache.htrace.core.TraceScope;
import org.apache.htrace.core.Tracer;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Abstract base class for all HBase event handlers. Subclasses should
@@ -53,7 +53,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public abstract class EventHandler implements Runnable, Comparable<Runnable> {
- private static final Log LOG = LogFactory.getLog(EventHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EventHandler.class);
// type of event this object represents
protected EventType eventType;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
index 7117d360d1..4cd800c178 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
@@ -32,9 +32,9 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
@@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
*/
@InterfaceAudience.Private
public class ExecutorService {
- private static final Log LOG = LogFactory.getLog(ExecutorService.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExecutorService.class);
// hold the all the executors created in a map addressable by their names
private final ConcurrentHashMap<String, Executor> executorMap = new ConcurrentHashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java
index 14b2466aeb..67f0486365 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java
@@ -32,8 +32,6 @@ import java.util.Random;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -52,7 +50,8 @@ import org.apache.hadoop.hbase.master.RackManager;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -68,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNode
*/
@InterfaceAudience.Private
public class FavoredNodeAssignmentHelper {
- private static final Log LOG = LogFactory.getLog(FavoredNodeAssignmentHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FavoredNodeAssignmentHelper.class);
private RackManager rackManager;
private Map<String, List<ServerName>> rackToRegionServerMap;
private List<String> uniqueRackList;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
index 68e5e897d3..a2cfa8543a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
@@ -29,8 +29,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -46,7 +44,8 @@ import org.apache.hadoop.hbase.master.SnapshotOfRegionAssignmentFromMeta;
import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class FavoredNodeLoadBalancer extends BaseLoadBalancer implements FavoredNodesPromoter {
- private static final Log LOG = LogFactory.getLog(FavoredNodeLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FavoredNodeLoadBalancer.class);
private RackManager rackManager;
private Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java
index 7705b3d1b7..e0b9dc5b5d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java
@@ -32,8 +32,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.ServerName;
@@ -45,7 +43,8 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.net.NetUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
@@ -64,7 +63,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@InterfaceAudience.Private
public class FavoredNodesManager {
- private static final Log LOG = LogFactory.getLog(FavoredNodesManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FavoredNodesManager.class);
private FavoredNodesPlan globalFavoredNodesAssignmentPlan;
private Map<ServerName, List<RegionInfo>> primaryRSToRegionMap;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
index c48d9d679f..0723f855af 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
@@ -31,8 +31,6 @@ import java.lang.reflect.Proxy;
import java.lang.reflect.UndeclaredThrowableException;
import java.net.URI;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -54,6 +52,8 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.Nullable;
@@ -63,7 +63,7 @@ import edu.umd.cs.findbugs.annotations.Nullable;
* separate filesystem objects for reading and writing hfiles and wals.
*/
public class HFileSystem extends FilterFileSystem {
- public static final Log LOG = LogFactory.getLog(HFileSystem.class);
+ public static final Logger LOG = LoggerFactory.getLogger(HFileSystem.class);
private final FileSystem noChecksumFs; // read hfile data from storage
private final boolean useHBaseChecksum;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
index cd3843f12a..faeac788ae 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
@@ -24,12 +24,12 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class FSDataInputStreamWrapper implements Closeable {
- private static final Log LOG = LogFactory.getLog(FSDataInputStreamWrapper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSDataInputStreamWrapper.class);
private static final boolean isLogTraceEnabled = LOG.isTraceEnabled();
private final HFileSystem hfs;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
index 52597b8297..42f3483648 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
@@ -26,9 +26,9 @@ import java.io.InputStream;
import java.io.FileNotFoundException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.CanSetDropBehind;
import org.apache.hadoop.fs.CanSetReadahead;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -92,7 +92,7 @@ import org.apache.hadoop.ipc.RemoteException;
*/
@InterfaceAudience.Private
public class FileLink {
- private static final Log LOG = LogFactory.getLog(FileLink.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FileLink.class);
/** Define the Back-reference directory name prefix: .links-<hfile>/ */
public static final String BACK_REFERENCES_DIRECTORY_PREFIX = ".links-";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java
index ee1038608b..2aebdf0d04 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* HFileLink describes a link to an hfile.
@@ -58,7 +58,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="EQ_DOESNT_OVERRIDE_EQUALS",
justification="To be fixed but warning suppressed for now")
public class HFileLink extends FileLink {
- private static final Log LOG = LogFactory.getLog(HFileLink.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileLink.class);
/**
* A non-capture group, for HFileLink, so that this can be embedded.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
index 1dfffd6cff..80207eb73e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
@@ -23,9 +23,9 @@ import java.nio.ByteBuffer;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@InterfaceAudience.Private
public class HalfStoreFileReader extends StoreFileReader {
- private static final Log LOG = LogFactory.getLog(HalfStoreFileReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HalfStoreFileReader.class);
final boolean top;
// This is the key we split around. Its the first possible entry on a row:
// i.e. empty column and a timestamp of LATEST_TIMESTAMP.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
index a7c26e0077..3248dd2ba3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
@@ -38,8 +38,6 @@ import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.crypto.Encryptor;
@@ -85,7 +83,8 @@ import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.DataChecksum;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.hbase.shaded.io.netty.bootstrap.Bootstrap;
@@ -117,7 +116,7 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise;
@InterfaceAudience.Private
public final class FanOutOneBlockAsyncDFSOutputHelper {
- private static final Log LOG = LogFactory.getLog(FanOutOneBlockAsyncDFSOutputHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputHelper.class);
private FanOutOneBlockAsyncDFSOutputHelper() {
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java
index 458df27ac2..22bd7d5aad 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java
@@ -74,8 +74,6 @@ import javax.security.sasl.SaslException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CipherOption;
import org.apache.hadoop.crypto.CipherSuite;
@@ -85,6 +83,9 @@ import org.apache.hadoop.crypto.Encryptor;
import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
import org.apache.hadoop.fs.FileEncryptionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import com.google.protobuf.ByteString;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -108,7 +109,7 @@ import org.apache.hadoop.security.token.Token;
@InterfaceAudience.Private
public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
- private static final Log LOG = LogFactory.getLog(FanOutOneBlockAsyncDFSOutputSaslHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputSaslHelper.class);
private FanOutOneBlockAsyncDFSOutputSaslHelper() {
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index a071fbdc89..242463cd65 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -22,11 +22,11 @@ import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class CacheConfig {
- private static final Log LOG = LogFactory.getLog(CacheConfig.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(CacheConfig.class.getName());
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
index 8342788ea4..5eb182640f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
@@ -21,10 +21,10 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.util.DataChecksum;
@@ -33,7 +33,7 @@ import org.apache.hadoop.util.DataChecksum;
*/
@InterfaceAudience.Private
public class ChecksumUtil {
- public static final Log LOG = LogFactory.getLog(ChecksumUtil.class);
+ public static final Logger LOG = LoggerFactory.getLogger(ChecksumUtil.class);
/** This is used to reserve space in a byte buffer */
private static byte[] DUMMY_VALUE = new byte[128 * HFileBlock.CHECKSUM_SIZE];
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java
index 7e5db088ff..0b58b21561 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java
@@ -23,14 +23,14 @@ import java.io.IOException;
import java.util.LinkedList;
import java.util.Queue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.BloomFilterChunk;
import org.apache.hadoop.hbase.util.BloomFilterUtil;
@@ -47,8 +47,8 @@ import org.apache.hadoop.io.Writable;
public class CompoundBloomFilterWriter extends CompoundBloomFilterBase
implements BloomFilterWriter, InlineBlockWriter {
- private static final Log LOG =
- LogFactory.getLog(CompoundBloomFilterWriter.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(CompoundBloomFilterWriter.class);
/** The current chunk being written to */
private BloomFilterChunk chunk;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
index d63c120212..e0c2c796b4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
@@ -38,8 +38,6 @@ import java.util.TreeMap;
import java.util.concurrent.atomic.LongAdder;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -51,6 +49,8 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.MetricsIO;
@@ -140,7 +140,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Private
public class HFile {
// LOG is being used in HFileBlock and CheckSumUtil
- static final Log LOG = LogFactory.getLog(HFile.class);
+ static final Logger LOG = LoggerFactory.getLogger(HFile.class);
/**
* Maximum length of key in HFile.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index c6c7446f3f..39ba6cd6d3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -27,14 +27,14 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.io.ByteBuffInputStream;
@@ -110,7 +110,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
*/
@InterfaceAudience.Private
public class HFileBlock implements Cacheable {
- private static final Log LOG = LogFactory.getLog(HFileBlock.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);
// Block Header fields.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
index 557a69c6b0..7b8815f625 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
@@ -30,8 +30,6 @@ import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.hbase.ByteBufferKeyOnlyKeyValue;
@@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KeyOnlyKeyValue;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFile.CachingBlockReader;
@@ -68,7 +68,7 @@ import org.apache.hadoop.util.StringUtils;
@InterfaceAudience.Private
public class HFileBlockIndex {
- private static final Log LOG = LogFactory.getLog(HFileBlockIndex.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileBlockIndex.class);
static final int DEFAULT_MAX_CHUNK_SIZE = 128 * 1024;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
index 5aea107cdc..639130db00 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
@@ -46,8 +46,6 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -80,6 +78,8 @@ import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.Counter;
@@ -99,7 +99,7 @@ import com.codahale.metrics.Timer;
@InterfaceStability.Evolving
public class HFilePrettyPrinter extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(HFilePrettyPrinter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFilePrettyPrinter.class);
private Options options = new Options();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
index 5021b4d1de..22e38bf58e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
@@ -26,8 +26,6 @@ import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.SizeCachedKeyValue;
import org.apache.hadoop.hbase.SizeCachedNoTagsKeyValue;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.compress.Compression;
@@ -74,7 +74,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
// one file. Ditto for all the HFileReader.ScannerV? implementations. I was running up against
// the MaxInlineLevel limit because too many tiers involved reading from an hfile. Was also hard
// to navigate the source code when so many classes participating in read.
- private static final Log LOG = LogFactory.getLog(HFileReaderImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileReaderImpl.class);
/** Data block index reader keeping the root data index in memory */
private HFileBlockIndex.CellBasedKeyBlockIndexReader dataBlockIndexReader;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
index 8c631ebb31..50d5ddc66e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
@@ -26,8 +26,6 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.CellComparatorImpl.MetaCellComparator;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -60,7 +60,7 @@ import org.apache.hadoop.io.Writable;
*/
@InterfaceAudience.Private
public class HFileWriterImpl implements HFile.Writer {
- private static final Log LOG = LogFactory.getLog(HFileWriterImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileWriterImpl.class);
private static final long UNSET = -1;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
index 3733535df1..43238d930f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
@@ -34,10 +34,10 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;
@@ -99,7 +99,7 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties({"encodingCountsForTest"})
public class LruBlockCache implements ResizableBlockCache, HeapSize {
- private static final Log LOG = LogFactory.getLog(LruBlockCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LruBlockCache.class);
/**
* Percentage of total size that eviction will evict until; e.g. if set to .8, then we will keep
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
index 838fa41889..ce8d53338e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
@@ -29,17 +29,16 @@ import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class PrefetchExecutor {
- private static final Log LOG = LogFactory.getLog(PrefetchExecutor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PrefetchExecutor.class);
/** Futures for tracking block prefetch activity */
private static final Map<Path, Future<?>> prefetchFutures = new ConcurrentSkipListMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
index 40b64be18d..8586967d86 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
@@ -31,9 +31,9 @@ import java.util.concurrent.atomic.LongAdder;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue;
import org.apache.commons.collections4.map.LinkedMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry;
@@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Ints;
@InterfaceAudience.Private
@JsonIgnoreProperties({"indexStatistics", "freeSize", "usedSize"})
public final class BucketAllocator {
- private static final Log LOG = LogFactory.getLog(BucketAllocator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BucketAllocator.class);
@JsonIgnoreProperties({"completelyFree", "uninstantiated"})
public final static class Bucket {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 0ced7c1caa..ee6eca4bf8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -54,11 +54,12 @@ import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import com.google.common.base.Preconditions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
@@ -102,7 +103,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
*/
@InterfaceAudience.Private
public class BucketCache implements BlockCache, HeapSize {
- private static final Log LOG = LogFactory.getLog(BucketCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BucketCache.class);
/** Priority buckets config */
static final String SINGLE_FACTOR_CONFIG_NAME = "hbase.bucketcache.single.factor";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index ad1c394d7c..9c19c8846c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -28,9 +28,10 @@ import java.nio.channels.FileChannel;
import java.util.Arrays;
import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
@@ -44,7 +45,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Private
public class FileIOEngine implements IOEngine {
- private static final Log LOG = LogFactory.getLog(FileIOEngine.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FileIOEngine.class);
public static final String FILE_DELIMITER = ",";
private final String[] filePaths;
private final FileChannel[] fileChannels;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
index 4fe39d38ae..e2f019114b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
@@ -23,9 +23,9 @@ import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
@@ -41,7 +41,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Private
public class FileMmapEngine implements IOEngine {
- static final Log LOG = LogFactory.getLog(FileMmapEngine.class);
+ static final Logger LOG = LoggerFactory.getLogger(FileMmapEngine.class);
private final String path;
private long size;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java
index 1f2025278d..471eb469b7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java
@@ -21,11 +21,11 @@ import java.lang.management.ManagementFactory;
import java.lang.management.MemoryType;
import java.lang.management.MemoryUsage;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
import org.apache.hadoop.hbase.util.Pair;
@@ -51,7 +51,7 @@ public class MemorySizeUtil {
// Default lower water mark limit is 95% size of memstore size.
public static final float DEFAULT_MEMSTORE_SIZE_LOWER_LIMIT = 0.95f;
- private static final Log LOG = LogFactory.getLog(MemorySizeUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MemorySizeUtil.class);
// a constant to convert a fraction to a percentage
private static final int CONVERT_TO_PERCENTAGE = 100;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
index 679f237721..1606b16288 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
@@ -17,11 +17,11 @@
*/
package org.apache.hadoop.hbase.ipc;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DaemonThreadFactory;
import org.apache.hadoop.hbase.shaded.io.netty.util.internal.StringUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;
@@ -36,7 +36,7 @@ import java.util.HashMap;
* This can be used for HMaster, where no prioritization is needed.
*/
public class FifoRpcScheduler extends RpcScheduler {
- private static final Log LOG = LogFactory.getLog(FifoRpcScheduler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FifoRpcScheduler.class);
private final int handlerCount;
private final int maxQueueLength;
private final AtomicInteger queueSize = new AtomicInteger(0);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java
index 91c468f197..7ac0d14e84 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java
@@ -38,13 +38,13 @@ import java.net.InetSocketAddress;
import java.util.List;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.Server;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.security.HBasePolicyProvider;
@@ -63,7 +63,7 @@ import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.CONFIG})
public class NettyRpcServer extends RpcServer {
- public static final Log LOG = LogFactory.getLog(NettyRpcServer.class);
+ public static final Logger LOG = LoggerFactory.getLogger(NettyRpcServer.class);
private final InetSocketAddress bindAddress;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
index 86537c0b01..a8a7fe0da4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
@@ -22,13 +22,13 @@ package org.apache.hadoop.hbase.ipc;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
@InterfaceStability.Evolving
public class RWQueueRpcExecutor extends RpcExecutor {
- private static final Log LOG = LogFactory.getLog(RWQueueRpcExecutor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RWQueueRpcExecutor.class);
public static final String CALL_QUEUE_READ_SHARE_CONF_KEY =
"hbase.ipc.server.callqueue.read.ratio";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java
index 445a460c75..d59c77c35d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java
@@ -30,13 +30,13 @@ import java.util.concurrent.atomic.LongAdder;
import java.util.Map;
import java.util.HashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.shaded.io.netty.util.internal.StringUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
import org.apache.hadoop.hbase.util.BoundedPriorityBlockingQueue;
import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings;
*/
@InterfaceAudience.Private
public abstract class RpcExecutor {
- private static final Log LOG = LogFactory.getLog(RpcExecutor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RpcExecutor.class);
protected static final int DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT = 250;
public static final String CALL_QUEUE_HANDLER_FACTOR_CONF_KEY = "hbase.ipc.server.callqueue.handler.factor";
@@ -151,7 +151,7 @@ public abstract class RpcExecutor {
}
protected int computeNumCallQueues(final int handlerCount, final float callQueuesHandlersFactor) {
- return Math.max(1, (int) Math.round(handlerCount * callQueuesHandlersFactor));
+ return Math.max(1, Math.round(handlerCount * callQueuesHandlersFactor));
}
public Map<String, Long> getCallQueueCountsSummary() {
@@ -205,7 +205,7 @@ public abstract class RpcExecutor {
}
for (int i = 0; i < numQueues; ++i) {
queues
- .add((BlockingQueue<CallRunner>) ReflectionUtils.newInstance(queueClass, queueInitArgs));
+ .add(ReflectionUtils.newInstance(queueClass, queueInitArgs));
}
}
@@ -308,7 +308,7 @@ public abstract class RpcExecutor {
}
}
} catch (Exception e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
throw e;
} finally {
if (interrupted) {
@@ -385,6 +385,7 @@ public abstract class RpcExecutor {
this.queueSize = queueSize;
}
+ @Override
public int getNextQueue() {
return ThreadLocalRandom.current().nextInt(queueSize);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index 43af98836a..6705691bf8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -35,9 +35,6 @@ import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.LongAdder;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CallQueueTooBigException;
import org.apache.hadoop.hbase.CellScanner;
@@ -58,15 +55,6 @@ import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
-import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.AuthorizationException;
-import org.apache.hadoop.security.authorize.PolicyProvider;
-import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-import org.apache.hadoop.security.token.SecretManager;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.yetus.audience.InterfaceAudience;
-
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
@@ -76,6 +64,18 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.authorize.PolicyProvider;
+import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
+import org.apache.hadoop.security.token.SecretManager;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
/**
* An RPC server that hosts protobuf described Services.
@@ -85,7 +85,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHea
public abstract class RpcServer implements RpcServerInterface,
ConfigurationObserver {
// LOG is being used in CallRunner and the log level is being changed in tests
- public static final Log LOG = LogFactory.getLog(RpcServer.class);
+ public static final Logger LOG = LoggerFactory.getLogger(RpcServer.class);
protected static final CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
= new CallQueueTooBigException();
@@ -109,7 +109,7 @@ public abstract class RpcServer implements RpcServerInterface,
protected static final String AUTH_FAILED_FOR = "Auth failed for ";
protected static final String AUTH_SUCCESSFUL_FOR = "Auth successful for ";
- protected static final Log AUDITLOG = LogFactory.getLog("SecurityLogger."
+ protected static final Logger AUDITLOG = LoggerFactory.getLogger("SecurityLogger."
+ Server.class.getName());
protected SecretManager secretManager;
protected final Map saslProps;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java
index b1b047d711..fcd11f5bc7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java
@@ -21,11 +21,11 @@ import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Server;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor;
import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
@InterfaceAudience.Private
public class RpcServerFactory {
- public static final Log LOG = LogFactory.getLog(RpcServerFactory.class);
+ public static final Logger LOG = LoggerFactory.getLogger(RpcServerFactory.class);
public static final String CUSTOM_RPC_SERVER_IMPL_CONF_KEY = "hbase.rpc.server.impl";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java
index ce9f290c56..62073db863 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Handles everything on master-side related to master election.
@@ -52,7 +52,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class ActiveMasterManager extends ZKListener {
- private static final Log LOG = LogFactory.getLog(ActiveMasterManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ActiveMasterManager.class);
final AtomicBoolean clusterHasActiveMaster = new AtomicBoolean(false);
final AtomicBoolean clusterShutDown = new AtomicBoolean(false);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java
index ccbfadc38b..5c084bf4eb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java
@@ -26,14 +26,14 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper;
import org.apache.hadoop.hbase.favored.FavoredNodesPlan;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Helper class that is used by {@link RegionPlacementMaintainer} to print
* information for favored nodes
@@ -41,7 +41,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class AssignmentVerificationReport {
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
AssignmentVerificationReport.class.getName());
private TableName tableName = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
index d3ba231b15..23912d67c8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
@@ -27,8 +27,6 @@ import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
@@ -53,6 +51,8 @@ import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.util.Triple;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A janitor for the catalog tables. Scans the hbase:meta catalog
@@ -60,7 +60,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class CatalogJanitor extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(CatalogJanitor.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(CatalogJanitor.class.getName());
private final AtomicBoolean alreadyRunning = new AtomicBoolean(false);
private final AtomicBoolean enabled = new AtomicBoolean(true);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java
index df5444ad4f..db04c606a9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java
@@ -18,9 +18,9 @@
*/
package org.apache.hadoop.hbase.master;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
@@ -42,7 +42,7 @@ import java.util.Set;
*/
@InterfaceAudience.Private
public class DeadServer {
- private static final Log LOG = LogFactory.getLog(DeadServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DeadServer.class);
/**
* Set of known dead servers. On znode expiration, servers are added here.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java
index 81a8b55385..a9e579629c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java
@@ -22,8 +22,6 @@ import java.util.List;
import java.util.NavigableSet;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -31,6 +29,8 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.ServerName;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tracks the list of draining region servers via ZK.
@@ -51,7 +51,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class DrainingServerTracker extends ZKListener {
- private static final Log LOG = LogFactory.getLog(DrainingServerTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DrainingServerTracker.class);
private ServerManager serverManager;
private final NavigableSet drainingServers = new TreeSet<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
index b1d83791c6..d37a80a1c3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
@@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.master;
import java.util.Map;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.master.locking.LockManager;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.procedure2.LockType;
@InterfaceAudience.Private
public class ExpiredMobFileCleanerChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(ExpiredMobFileCleanerChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExpiredMobFileCleanerChore.class);
private final HMaster master;
private ExpiredMobFileCleaner cleaner;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 262dfa2375..c238c8194e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -39,6 +39,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
@@ -50,8 +51,6 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ClusterStatus;
@@ -188,7 +187,8 @@ import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.webapp.WebAppContext;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
@@ -221,7 +221,7 @@ import com.google.protobuf.Service;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
@SuppressWarnings("deprecation")
public class HMaster extends HRegionServer implements MasterServices {
- private static final Log LOG = LogFactory.getLog(HMaster.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(HMaster.class.getName());
/**
* Protection against zombie master. Started once Master accepts active responsibility and
@@ -607,6 +607,7 @@ public class HMaster extends HRegionServer implements MasterServices {
return connector.getLocalPort();
}
+ @Override
protected Function getMetaTableObserver() {
return builder -> builder.setRegionReplication(conf.getInt(HConstants.META_REPLICAS_NUM, HConstants.DEFAULT_META_REPLICA_NUM));
}
@@ -818,7 +819,7 @@ public class HMaster extends HRegionServer implements MasterServices {
// Wait for region servers to report in
String statusStr = "Wait for region servers to report in";
status.setStatus(statusStr);
- LOG.info(status);
+ LOG.info(Objects.toString(status));
waitForRegionServers(status);
if (this.balancer instanceof FavoredNodesPromoter) {
@@ -1528,6 +1529,7 @@ public class HMaster extends HRegionServer implements MasterServices {
/**
* @return Client info for use as prefix on an audit log string; who did an action
*/
+ @Override
public String getClientIdAuditPrefix() {
return "Client=" + RpcServer.getRequestUserName().orElse(null)
+ "/" + RpcServer.getRemoteAddress().orElse(null);
@@ -2017,7 +2019,7 @@ public class HMaster extends HRegionServer implements MasterServices {
}
} catch (Throwable t) {
status.setStatus("Failed to become active: " + t.getMessage());
- LOG.fatal("Failed to become active master", t);
+ LOG.error("Failed to become active master", t);
// HBASE-5680: Likely hadoop23 vs hadoop 20.x/1.x incompatibility
if (t instanceof NoClassDefFoundError &&
t.getMessage()
@@ -2606,13 +2608,13 @@ public class HMaster extends HRegionServer implements MasterServices {
}
if (cpHost != null) {
// HBASE-4014: dump a list of loaded coprocessors.
- LOG.fatal("Master server abort: loaded coprocessors are: " +
+ LOG.error("Master server abort: loaded coprocessors are: " +
getLoadedCoprocessors());
}
if (t != null) {
- LOG.fatal(msg, t);
+ LOG.error(msg, t);
} else {
- LOG.fatal(msg);
+ LOG.error(msg);
}
try {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
index 093412a47f..3ec70d3b03 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
@@ -26,8 +26,6 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LocalHBaseCluster;
@@ -46,10 +44,12 @@ import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class HMasterCommandLine extends ServerCommandLine {
- private static final Log LOG = LogFactory.getLog(HMasterCommandLine.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HMasterCommandLine.class);
private static final String USAGE =
"Usage: Master [opts] start|stop|clear\n" +
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
index bc262290fc..ee7bcd6e6a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
@@ -23,8 +23,7 @@ import java.util.List;
import java.util.Set;
import com.google.protobuf.Service;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.MetaMutationAnnotation;
@@ -61,6 +60,8 @@ import org.apache.hadoop.hbase.quotas.GlobalQuotaSettings;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.security.User;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides the coprocessor framework and environment for master oriented
@@ -71,7 +72,7 @@ import org.apache.yetus.audience.InterfaceAudience;
public class MasterCoprocessorHost
extends CoprocessorHost {
- private static final Log LOG = LogFactory.getLog(MasterCoprocessorHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterCoprocessorHost.class);
/**
* Coprocessor environment extension providing access to master related
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
index 27987f6bce..a39c602739 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -47,6 +45,8 @@ import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class abstracts a bunch of operations the HMaster needs to interact with
@@ -55,7 +55,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class MasterFileSystem {
- private static final Log LOG = LogFactory.getLog(MasterFileSystem.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterFileSystem.class);
/** Parameter name for HBase instance root directory permission*/
public static final String HBASE_DIR_PERMS = "hbase.rootdir.perms";
@@ -264,12 +264,12 @@ public class MasterFileSystem {
HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
}
} catch (DeserializationException de) {
- LOG.fatal("Please fix invalid configuration for " + HConstants.HBASE_DIR, de);
+ LOG.error("Please fix invalid configuration for " + HConstants.HBASE_DIR, de);
IOException ioe = new IOException();
ioe.initCause(de);
throw ioe;
} catch (IllegalArgumentException iae) {
- LOG.fatal("Please fix invalid configuration for "
+ LOG.error("Please fix invalid configuration for "
+ HConstants.HBASE_DIR + " " + rd.toString(), iae);
throw iae;
}
@@ -444,7 +444,7 @@ public class MasterFileSystem {
public void stop() {
}
- public void logFileSystemState(Log log) throws IOException {
+ public void logFileSystemState(Logger log) throws IOException {
FSUtils.logFileSystemState(fs, rootdir, log);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java
index 7abf02cf55..78e7e6e964 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -38,13 +36,15 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Used by the HMaster on startup to split meta logs and assign the meta table.
*/
@InterfaceAudience.Private
public class MasterMetaBootstrap {
- private static final Log LOG = LogFactory.getLog(MasterMetaBootstrap.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterMetaBootstrap.class);
private final MonitoredTask status;
private final HMaster master;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java
index 8677975396..9d6da0c1ff 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java
@@ -27,12 +27,12 @@ import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.master.locking.LockManager;
import org.apache.hadoop.hbase.mob.MobUtils;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
*/
@InterfaceAudience.Private
public class MasterMobCompactionThread {
- static final Log LOG = LogFactory.getLog(MasterMobCompactionThread.class);
+ static final Logger LOG = LoggerFactory.getLogger(MasterMobCompactionThread.class);
private final HMaster master;
private final Configuration conf;
private final ExecutorService mobCompactorPool;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 3e2f0efdeb..8794445ee2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -31,8 +31,6 @@ import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
@@ -85,14 +83,6 @@ import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.AccessController;
import org.apache.hadoop.hbase.security.visibility.VisibilityController;
-import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
-import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-import org.apache.hadoop.hbase.util.Pair;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.zookeeper.KeeperException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
@@ -115,134 +105,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockR
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDecommissionedRegionServersRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDecommissionedRegionServersResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RecommissionRegionServerRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RecommissionRegionServerResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.*;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot;
@@ -280,6 +143,16 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.Repli
import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
+import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Implements the master RPC services.
@@ -289,7 +162,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
public class MasterRpcServices extends RSRpcServices
implements MasterService.BlockingInterface, RegionServerStatusService.BlockingInterface,
LockService.BlockingInterface {
- private static final Log LOG = LogFactory.getLog(MasterRpcServices.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(MasterRpcServices.class.getName());
private final HMaster master;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java
index 891ea9b49c..52ba0991aa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java
@@ -27,8 +27,6 @@ import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class MasterWalManager {
- private static final Log LOG = LogFactory.getLog(MasterWalManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterWalManager.class);
final static PathFilter META_FILTER = new PathFilter() {
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java
index c2c37bdd29..83a6988006 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java
@@ -18,10 +18,10 @@
package org.apache.hadoop.hbase.master;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.metrics.Counter;
import org.apache.hadoop.hbase.metrics.Histogram;
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
@InterfaceStability.Evolving
@InterfaceAudience.Private
public class MetricsMaster {
- private static final Log LOG = LogFactory.getLog(MetricsMaster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsMaster.class);
private MetricsMasterSource masterSource;
private MetricsMasterProcSource masterProcSource;
private MetricsMasterQuotaSource masterQuotaSource;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
index 2e0e44c470..8a7c4e1bf8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
@@ -22,11 +22,11 @@ import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableState;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.procedure2.LockType;
@InterfaceAudience.Private
public class MobCompactionChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(MobCompactionChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MobCompactionChore.class);
private HMaster master;
private ExecutorService pool;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java
index 58acf83c9f..447c6a6d8e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java
@@ -22,9 +22,9 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -37,7 +37,7 @@ import org.apache.hadoop.net.ScriptBasedMapping;
*/
@InterfaceAudience.Private
public class RackManager {
- private static final Log LOG = LogFactory.getLog(RackManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RackManager.class);
public static final String UNKNOWN_RACK = "Unknown Rack";
private DNSToSwitchMapping switchMapping;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java
index afd402b190..370f1f26f7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java
@@ -38,8 +38,6 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.ClusterStatus.Option;
@@ -57,10 +55,9 @@ import org.apache.hadoop.hbase.favored.FavoredNodesPlan;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.MunkresAssignment;
import org.apache.hadoop.hbase.util.Pair;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
@@ -74,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavor
@InterfaceAudience.Private
// TODO: Remove? Unused. Partially implemented only.
public class RegionPlacementMaintainer {
- private static final Log LOG = LogFactory.getLog(RegionPlacementMaintainer.class
+ private static final Logger LOG = LoggerFactory.getLogger(RegionPlacementMaintainer.class
.getName());
//The cost of a placement that should never be assigned.
private static final float MAX_COST = Float.POSITIVE_INFINITY;
@@ -980,12 +977,6 @@ public class RegionPlacementMaintainer {
opt.addOption("ld", "locality-dispersion", false, "print locality and dispersion " +
"information for current plan");
try {
- // Set the log4j
- Logger.getLogger("org.apache.zookeeper").setLevel(Level.ERROR);
- Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.ERROR);
- Logger.getLogger("org.apache.hadoop.hbase.master.RegionPlacementMaintainer")
- .setLevel(Level.INFO);
-
CommandLine cmd = new GnuParser().parse(opt, args);
Configuration conf = HBaseConfiguration.create();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java
index 0e9351d226..2f2d536ab5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java
@@ -25,8 +25,6 @@ import java.util.List;
import java.util.NavigableMap;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
@@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tracks the online region servers via ZK.
@@ -49,7 +49,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class RegionServerTracker extends ZKListener {
- private static final Log LOG = LogFactory.getLog(RegionServerTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionServerTracker.class);
private NavigableMap regionServers = new TreeMap<>();
private ServerManager serverManager;
private MasterServices server;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
index b86315be7a..923a0a7ae7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
@@ -38,8 +38,6 @@ import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Predicate;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClockOutOfSyncException;
import org.apache.hadoop.hbase.HConstants;
@@ -61,7 +59,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -109,7 +108,7 @@ public class ServerManager {
public static final String WAIT_ON_REGIONSERVERS_INTERVAL =
"hbase.master.wait.on.regionservers.interval";
- private static final Log LOG = LogFactory.getLog(ServerManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ServerManager.class);
// Set if we are to shutdown the cluster.
private AtomicBoolean clusterShutdown = new AtomicBoolean(false);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java
index 5d889a4f73..9031577584 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java
@@ -32,8 +32,6 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -47,6 +45,8 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper;
import org.apache.hadoop.hbase.favored.FavoredNodesPlan;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Used internally for reading meta and constructing datastructures that are
@@ -56,7 +56,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class SnapshotOfRegionAssignmentFromMeta {
- private static final Log LOG = LogFactory.getLog(SnapshotOfRegionAssignmentFromMeta.class
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotOfRegionAssignmentFromMeta.class
.getName());
private final Connection connection;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
index 8ab087d814..132c744568 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
@@ -36,8 +36,6 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -56,7 +54,8 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -90,7 +89,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class SplitLogManager {
- private static final Log LOG = LogFactory.getLog(SplitLogManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SplitLogManager.class);
private final MasterServices server;
@@ -415,7 +414,7 @@ public class SplitLogManager {
batch.installed++;
return null;
}
- LOG.fatal("Logic error. Deleted task still present in tasks map");
+ LOG.error("Logic error. Deleted task still present in tasks map");
assert false : "Deleted task still present in tasks map";
return t;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java
index 6a138ff80b..ce8182f64a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java
@@ -23,8 +23,6 @@ import java.io.InterruptedIOException;
import java.util.NavigableSet;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -39,6 +37,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZKNamespaceManager;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
@@ -71,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
justification="TODO: synchronize access on nsTable but it is done in tiers above and this " +
"class is going away/shrinking")
public class TableNamespaceManager {
- private static final Log LOG = LogFactory.getLog(TableNamespaceManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableNamespaceManager.class);
private Configuration conf;
private MasterServices masterServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
index 330b752b5c..ad8908ac96 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
@@ -28,13 +28,14 @@ import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.TableState;
@@ -46,7 +47,7 @@ import org.apache.hadoop.hbase.client.TableState;
*/
@InterfaceAudience.Private
public class TableStateManager {
- private static final Log LOG = LogFactory.getLog(TableStateManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableStateManager.class);
private final ReadWriteLock lock = new ReentrantReadWriteLock();
private final MasterServices master;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
index 5555062f3b..2af227a2d2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
@@ -22,8 +22,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
import java.util.Comparator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AssignRegionStateData;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState;
@@ -72,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
*/
@InterfaceAudience.Private
public class AssignProcedure extends RegionTransitionProcedure {
- private static final Log LOG = LogFactory.getLog(AssignProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AssignProcedure.class);
/**
* Set to true when we need recalibrate -- choose a new target -- because original assign failed.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
index 5daf96d8c7..fac51f037e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
@@ -36,8 +36,6 @@ import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HConstants;
@@ -92,6 +90,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The AssignmentManager is the coordinator for region assign/unassign operations.
@@ -106,7 +106,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class AssignmentManager implements ServerListener {
- private static final Log LOG = LogFactory.getLog(AssignmentManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AssignmentManager.class);
// TODO: AMv2
// - handle region migration from hbase1 to hbase2.
@@ -510,7 +510,7 @@ public class AssignmentManager implements ServerListener {
}
}
} catch (Throwable t) {
- LOG.error(t);
+ LOG.error(t.toString(), t);
}
}).start();
}
@@ -748,7 +748,7 @@ public class AssignmentManager implements ServerListener {
plan.setDestination(getBalancer().randomAssignment(plan.getRegionInfo(),
this.master.getServerManager().createDestinationServersList(exclude)));
} catch (HBaseIOException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
}
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java
index 37521cc67f..610003df91 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -30,7 +28,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.GCMergedRegionsState;
@@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.G
@InterfaceAudience.Private
public class GCMergedRegionsProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(GCMergedRegionsProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(GCMergedRegionsProcedure.class);
private RegionInfo father;
private RegionInfo mother;
private RegionInfo mergedChild;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java
index 805b870b38..2b433484c6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.backup.HFileArchiver;
@@ -33,7 +31,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -47,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.G
*/
@InterfaceAudience.Private
public class GCRegionProcedure extends AbstractStateMachineRegionProcedure {
- private static final Log LOG = LogFactory.getLog(GCRegionProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(GCRegionProcedure.class);
public GCRegionProcedure(final MasterProcedureEnv env, final RegionInfo hri) {
super(env, hri);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
index 0d107f2a9d..6b2d54e1a6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
@@ -24,8 +24,6 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -64,7 +62,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
@@ -79,7 +78,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M
@InterfaceAudience.Private
public class MergeTableRegionsProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(MergeTableRegionsProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MergeTableRegionsProcedure.class);
private Boolean traceEnabled;
private volatile boolean lock = false;
private ServerName regionLocation;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
index 5940f2fe31..a29bfee2cb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -31,7 +29,8 @@ import org.apache.hadoop.hbase.master.procedure.AbstractStateMachineRegionProced
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStateData;
@@ -44,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M
*/
@InterfaceAudience.Private
public class MoveRegionProcedure extends AbstractStateMachineRegionProcedure {
- private static final Log LOG = LogFactory.getLog(MoveRegionProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MoveRegionProcedure.class);
private RegionPlan plan;
public MoveRegionProcedure() {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java
index 0b49b36ca9..39513dc945 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java
@@ -23,8 +23,8 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.util.Objects;
+
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -48,7 +48,8 @@ import org.apache.hadoop.hbase.util.MultiHConnection;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@@ -57,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
*/
@InterfaceAudience.Private
public class RegionStateStore {
- private static final Log LOG = LogFactory.getLog(RegionStateStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionStateStore.class);
/** The delimiter for meta columns for replicaIds > 0 */
protected static final char META_REPLICA_ID_DELIMITER = '_';
@@ -198,7 +199,7 @@ public class RegionStateStore {
.setType(CellBuilder.DataType.Put)
.setValue(Bytes.toBytes(state.name()))
.build());
- LOG.info(info);
+ LOG.info(Objects.toString(info));
final boolean serialReplication = hasSerialReplicationScope(regionInfo.getTable());
if (serialReplication && state == State.OPEN) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
index e9468734a2..c43760474b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
@@ -34,8 +34,6 @@ import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -47,7 +45,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class RegionStates {
- private static final Log LOG = LogFactory.getLog(RegionStates.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionStates.class);
protected static final State[] STATES_EXPECTED_ON_OPEN = new State[] {
State.OFFLINE, State.CLOSED, // disable/offline
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index 17ba75a07c..fbd6946954 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -22,8 +22,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -36,11 +34,11 @@ import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation;
import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteProcedure;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Base class for the Assign and Unassign Procedure.
@@ -89,7 +87,7 @@ public abstract class RegionTransitionProcedure
extends Procedure
implements TableProcedureInterface,
RemoteProcedure {
- private static final Log LOG = LogFactory.getLog(RegionTransitionProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionTransitionProcedure.class);
protected final AtomicBoolean aborted = new AtomicBoolean(false);
@@ -163,6 +161,7 @@ public abstract class RegionTransitionProcedure
protected abstract void reportTransition(MasterProcedureEnv env,
RegionStateNode regionNode, TransitionCode code, long seqId) throws UnexpectedStateException;
+ @Override
public abstract RemoteOperation remoteCallBuild(MasterProcedureEnv env, ServerName serverName);
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
index 69024340a4..809d6f3d24 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
@@ -33,8 +33,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -72,7 +70,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
@@ -87,7 +86,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.S
@InterfaceAudience.Private
public class SplitTableRegionProcedure
extends AbstractStateMachineRegionProcedure {
- private static final Log LOG = LogFactory.getLog(SplitTableRegionProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SplitTableRegionProcedure.class);
private Boolean traceEnabled = null;
private RegionInfo daughter_1_RI;
private RegionInfo daughter_2_RI;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
index 66277bec11..8536e77ef1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.ServerName;
@@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperat
import org.apache.hadoop.hbase.regionserver.RegionServerAbortedException;
import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.UnassignRegionStateData;
@@ -72,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
*/
@InterfaceAudience.Private
public class UnassignProcedure extends RegionTransitionProcedure {
- private static final Log LOG = LogFactory.getLog(UnassignProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(UnassignProcedure.class);
/**
* Where to send the unassign RPC.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java
index b964f219f0..712567cb3d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.master.balancer;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.HMaster;
/**
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.master.HMaster;
*/
@InterfaceAudience.Private
public class BalancerChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(BalancerChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BalancerChore.class);
private final HMaster master;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
index 8dd34c28ac..c30a6fd63f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
@@ -35,8 +35,6 @@ import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.commons.lang3.NotImplementedException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -59,6 +57,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The base class for load balancers. It provides the the functions used to by
@@ -1004,7 +1004,7 @@ public abstract class BaseLoadBalancer implements LoadBalancer {
protected Configuration config = HBaseConfiguration.create();
protected RackManager rackManager;
private static final Random RANDOM = new Random(System.currentTimeMillis());
- private static final Log LOG = LogFactory.getLog(BaseLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BaseLoadBalancer.class);
protected MetricsBalancer metricsBalancer = null;
protected ClusterStatus clusterStatus = null;
protected ServerName masterServerName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java
index 31b1e09291..427322dfc4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.master.balancer;
import java.io.InterruptedIOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.LoadBalancer;
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.master.LoadBalancer;
*/
@InterfaceAudience.Private
public class ClusterStatusChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(ClusterStatusChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClusterStatusChore.class);
private final HMaster master;
private final LoadBalancer balancer;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
index a2fe9a25f2..b3e72faa1c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
@@ -32,8 +32,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
@@ -47,7 +45,8 @@ import org.apache.hadoop.hbase.master.LoadBalancer;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.util.Pair;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@@ -70,7 +69,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
public class FavoredStochasticBalancer extends StochasticLoadBalancer implements
FavoredNodesPromoter {
- private static final Log LOG = LogFactory.getLog(FavoredStochasticBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FavoredStochasticBalancer.class);
private FavoredNodesManager fnm;
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
index 3046243622..2b48f59a61 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
@@ -28,8 +28,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
@@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
@@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
*/
@InterfaceAudience.Private
class RegionLocationFinder {
- private static final Log LOG = LogFactory.getLog(RegionLocationFinder.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionLocationFinder.class);
private static final long CACHE_TIME = 240 * 60 * 1000;
private static final HDFSBlocksDistribution EMPTY_BLOCK_DISTRIBUTION = new HDFSBlocksDistribution();
private Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java
index e356942cbe..adfc577877 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java
@@ -28,8 +28,6 @@ import java.util.NavigableMap;
import java.util.Random;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -39,7 +37,8 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue;
/**
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQu
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class SimpleLoadBalancer extends BaseLoadBalancer {
- private static final Log LOG = LogFactory.getLog(SimpleLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleLoadBalancer.class);
private static final Random RANDOM = new Random(System.currentTimeMillis());
private RegionInfoComparator riComparator = new RegionInfoComparator();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java
index 23d9cb47c6..7123155925 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java
@@ -30,8 +30,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -52,7 +50,8 @@ import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.SwapRegi
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Optional;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -121,7 +120,7 @@ public class StochasticLoadBalancer extends BaseLoadBalancer {
"hbase.master.balancer.stochastic.minCostNeedBalance";
protected static final Random RANDOM = new Random(System.currentTimeMillis());
- private static final Log LOG = LogFactory.getLog(StochasticLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StochasticLoadBalancer.class);
Map> loads = new HashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 582df8436a..bc7c82d1db 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -35,8 +35,6 @@ import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -45,6 +43,8 @@ import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.ipc.RemoteException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Abstract Cleaner that uses a chain of delegates to clean a directory of files
@@ -53,7 +53,7 @@ import org.apache.hadoop.ipc.RemoteException;
public abstract class CleanerChore extends ScheduledChore
implements ConfigurationObserver {
- private static final Log LOG = LogFactory.getLog(CleanerChore.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(CleanerChore.class.getName());
private static final int AVAIL_PROCESSORS = Runtime.getRuntime().availableProcessors();
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
index 5c78dc498e..8c02f3ed6e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
@@ -25,8 +25,6 @@ import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.StealJobQueue;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
* This Chore, every time it runs, will clear the HFiles in the hfile archive
@@ -77,7 +76,7 @@ public class HFileCleaner extends CleanerChore {
"hbase.regionserver.hfilecleaner.small.thread.count";
public final static int DEFAULT_SMALL_HFILE_DELETE_THREAD_NUMBER = 1;
- private static final Log LOG = LogFactory.getLog(HFileCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileCleaner.class);
StealJobQueue largeFileQueue;
BlockingQueue smallFileQueue;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java
index 8129e34b9c..db5230c853 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java
@@ -18,8 +18,7 @@
package org.apache.hadoop.hbase.master.cleaner;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -27,6 +26,8 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.util.FSUtils;
@@ -41,7 +42,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class HFileLinkCleaner extends BaseHFileCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(HFileLinkCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileLinkCleaner.class);
private FileSystem fs = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
index 44aafe2038..5d5dddbc17 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
@@ -25,8 +25,6 @@ import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -35,7 +33,8 @@ import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class LogCleaner extends CleanerChore {
- private static final Log LOG = LogFactory.getLog(LogCleaner.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(LogCleaner.class.getName());
public static final String OLD_WALS_CLEANER_SIZE = "hbase.oldwals.cleaner.thread.size";
public static final int OLD_WALS_CLEANER_DEFAULT_SIZE = 2;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java
index 23e5a666b2..43a99bdb51 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java
@@ -25,14 +25,14 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Delete;
@@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public class ReplicationMetaCleaner extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(ReplicationMetaCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationMetaCleaner.class);
private final Admin admin;
private final MasterServices master;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java
index 3f7bd74473..97deab51ed 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java
@@ -26,8 +26,6 @@ import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -41,13 +39,15 @@ import org.apache.hadoop.hbase.replication.ReplicationStateZKBase;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Used to clean the replication queues belonging to the peer which does not exist.
*/
@InterfaceAudience.Private
public class ReplicationZKNodeCleaner {
- private static final Log LOG = LogFactory.getLog(ReplicationZKNodeCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationZKNodeCleaner.class);
private final ZKWatcher zkw;
private final ReplicationQueuesClient queuesClient;
private final ReplicationPeers replicationPeers;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java
index 6be13849fa..8d5df9bfd2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java
@@ -22,18 +22,18 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Chore that will clean the replication queues belonging to the peer which does not exist.
*/
@InterfaceAudience.Private
public class ReplicationZKNodeCleanerChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(ReplicationZKNodeCleanerChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationZKNodeCleanerChore.class);
private final ReplicationZKNodeCleaner cleaner;
public ReplicationZKNodeCleanerChore(Stoppable stopper, int period,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java
index f9ebdf3bb2..e789752d1a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.master.cleaner;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class TimeToLiveHFileCleaner extends BaseHFileCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(TimeToLiveHFileCleaner.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(TimeToLiveHFileCleaner.class.getName());
public static final String TTL_CONF_KEY = "hbase.master.hfilecleaner.ttl";
// default ttl = 5 minutes
public static final long DEFAULT_TTL = 60000 * 5;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java
index c2b872f1ef..7385273e71 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java
@@ -17,10 +17,10 @@
*/
package org.apache.hadoop.hbase.master.cleaner;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class TimeToLiveLogCleaner extends BaseLogCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(TimeToLiveLogCleaner.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(TimeToLiveLogCleaner.class.getName());
public static final String TTL_CONF_KEY = "hbase.master.logcleaner.ttl";
// default ttl = 10 minutes
public static final long DEFAULT_TTL = 600_000L;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java
index cd9a7ec3d2..4c86d1f282 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java
@@ -17,14 +17,14 @@
*/
package org.apache.hadoop.hbase.master.cleaner;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Procedure WAL cleaner that uses the timestamp of the Procedure WAL to determine if it should be
@@ -33,7 +33,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class TimeToLiveProcedureWALCleaner extends BaseFileCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(TimeToLiveProcedureWALCleaner.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(TimeToLiveProcedureWALCleaner.class.getName());
public static final String TTL_CONF_KEY = "hbase.master.procedurewalcleaner.ttl";
// default ttl = 7 days
public static final long DEFAULT_TTL = 604_800_000L;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java
index 883d6596ca..36269f378e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java
@@ -22,15 +22,14 @@ import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.procedure2.LockType;
import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -38,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public final class LockManager {
- private static final Log LOG = LogFactory.getLog(LockManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LockManager.class);
private final HMaster master;
private final RemoteLocks remoteLocks;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
index 61843d81fa..edf7642574 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
@@ -24,8 +24,6 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData;
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
@InterfaceAudience.Private
public final class LockProcedure extends Procedure
implements TableProcedureInterface {
- private static final Log LOG = LogFactory.getLog(LockProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LockProcedure.class);
public static final int DEFAULT_REMOTE_LOCKS_TIMEOUT_MS = 30000; // timeout in ms
public static final String REMOTE_LOCKS_TIMEOUT_MS_CONF =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java
index b6602b190b..7c33661d7c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java
@@ -20,18 +20,18 @@ package org.apache.hadoop.hbase.master.normalizer;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Normalization plan to merge regions (smallest region in the table with its smallest neighbor).
*/
@InterfaceAudience.Private
public class MergeNormalizationPlan implements NormalizationPlan {
- private static final Log LOG = LogFactory.getLog(MergeNormalizationPlan.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(MergeNormalizationPlan.class.getName());
private final RegionInfo firstRegion;
private final RegionInfo secondRegion;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java
index 7137bc9d9d..19d2dc7a3b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java
@@ -18,10 +18,10 @@
*/
package org.apache.hadoop.hbase.master.normalizer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.HMaster;
import java.io.IOException;
@@ -32,7 +32,7 @@ import java.io.IOException;
*/
@InterfaceAudience.Private
public class RegionNormalizerChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(RegionNormalizerChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionNormalizerChore.class);
private final HMaster master;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java
index 8190f271ce..767324aa49 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java
@@ -23,8 +23,6 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.RegionLoad;
import org.apache.hadoop.hbase.ServerName;
@@ -35,7 +33,8 @@ import org.apache.hadoop.hbase.master.MasterRpcServices;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
/**
@@ -60,7 +59,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
@InterfaceAudience.Private
public class SimpleRegionNormalizer implements RegionNormalizer {
- private static final Log LOG = LogFactory.getLog(SimpleRegionNormalizer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleRegionNormalizer.class);
private static final int MIN_REGION_COUNT = 3;
private MasterServices masterServices;
private MasterRpcServices masterRpcServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java
index 9217143edd..b5f8e823ec 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java
@@ -21,18 +21,18 @@ package org.apache.hadoop.hbase.master.normalizer;
import java.io.IOException;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Normalization plan to split region.
*/
@InterfaceAudience.Private
public class SplitNormalizationPlan implements NormalizationPlan {
- private static final Log LOG = LogFactory.getLog(SplitNormalizationPlan.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(SplitNormalizationPlan.class.getName());
private RegionInfo regionInfo;
private byte[] splitPoint;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
index a524879bf0..a17108fdca 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
@@ -25,8 +25,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -54,7 +52,8 @@ import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
@@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
public class CloneSnapshotProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(CloneSnapshotProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CloneSnapshotProcedure.class);
private TableDescriptor tableDescriptor;
private SnapshotDescription snapshot;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
index fa743bdbdd..f0d0af8c53 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
@@ -19,11 +19,12 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceExistException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.master.TableNamespaceManager;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
@@ -38,7 +39,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
@InterfaceAudience.Private
public class CreateNamespaceProcedure
extends AbstractStateMachineNamespaceProcedure {
- private static final Log LOG = LogFactory.getLog(CreateNamespaceProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CreateNamespaceProcedure.class);
private NamespaceDescriptor nsDescriptor;
private Boolean traceEnabled;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
index ed137c2598..63d6d2f7e4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.ModifyRegionUtils;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
@@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.C
@InterfaceAudience.Private
public class CreateTableProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(CreateTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CreateTableProcedure.class);
private TableDescriptor tableDescriptor;
private List newRegions;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
index 9646946e89..1c587eb55d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
@@ -20,8 +20,7 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.FileNotFoundException;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -29,6 +28,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.master.TableNamespaceManager;
@@ -44,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
@InterfaceAudience.Private
public class DeleteNamespaceProcedure
extends AbstractStateMachineNamespaceProcedure {
- private static final Log LOG = LogFactory.getLog(DeleteNamespaceProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DeleteNamespaceProcedure.class);
private NamespaceDescriptor nsDescriptor;
private String namespaceName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
index 4cc18755ec..151e3d65d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -49,7 +47,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -58,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.D
@InterfaceAudience.Private
public class DeleteTableProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(DeleteTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DeleteTableProcedure.class);
private List regions;
private TableName tableName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
index 045ee9e7e8..e748c6ce7f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
@@ -19,13 +19,14 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
@@ -38,7 +39,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.D
@InterfaceAudience.Private
public class DisableTableProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(DisableTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DisableTableProcedure.class);
private TableName tableName;
private boolean skipTableStateCheck;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
index cf40696960..c501e5396a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.E
@InterfaceAudience.Private
public class EnableTableProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(EnableTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EnableTableProcedure.class);
private TableName tableName;
private boolean skipTableStateCheck;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java
index 02ecdc6b3d..6d06de2c51 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -31,13 +29,15 @@ import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Helper class for schema change procedures
*/
@InterfaceAudience.Private
public final class MasterDDLOperationHelper {
- private static final Log LOG = LogFactory.getLog(MasterDDLOperationHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterDDLOperationHelper.class);
private MasterDDLOperationHelper() {}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
index c9c3ac9820..0a4c97db59 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
@@ -20,14 +20,14 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.conf.ConfigurationObserver;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class MasterProcedureEnv implements ConfigurationObserver {
- private static final Log LOG = LogFactory.getLog(MasterProcedureEnv.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterProcedureEnv.class);
@InterfaceAudience.Private
public static class WALStoreLeaseRecovery implements WALProcedureStore.LeaseRecovery {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
index 9402845354..c60de5c9df 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
@@ -26,8 +26,6 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableExistsException;
@@ -50,7 +48,8 @@ import org.apache.hadoop.hbase.util.AvlUtil.AvlLinkedNode;
import org.apache.hadoop.hbase.util.AvlUtil.AvlTree;
import org.apache.hadoop.hbase.util.AvlUtil.AvlTreeIterator;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -104,7 +103,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class MasterProcedureScheduler extends AbstractProcedureScheduler {
- private static final Log LOG = LogFactory.getLog(MasterProcedureScheduler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterProcedureScheduler.class);
private final static ServerQueueKeyComparator SERVER_QUEUE_KEY_COMPARATOR =
new ServerQueueKeyComparator();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java
index 7826f96f79..b87f437989 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java
@@ -21,10 +21,10 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
@@ -36,7 +36,7 @@ import org.apache.hadoop.security.UserGroupInformation;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class MasterProcedureUtil {
- private static final Log LOG = LogFactory.getLog(MasterProcedureUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterProcedureUtil.class);
private MasterProcedureUtil() {}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
index 697a2ea668..2a7dc5b28b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
@@ -19,11 +19,12 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.TableNamespaceManager;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -36,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M
@InterfaceAudience.Private
public class ModifyNamespaceProcedure
extends AbstractStateMachineNamespaceProcedure {
- private static final Log LOG = LogFactory.getLog(ModifyNamespaceProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ModifyNamespaceProcedure.class);
private NamespaceDescriptor oldNsDescriptor;
private NamespaceDescriptor newNsDescriptor;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
index bda8b81e98..f0be1e0819 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
@@ -23,8 +23,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableState;
@@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M
@InterfaceAudience.Private
public class ModifyTableProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(ModifyTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ModifyTableProcedure.class);
private TableDescriptor unmodifiedTableDescriptor = null;
private TableDescriptor modifiedTableDescriptor;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
index 23b83acd64..ae37a48d26 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
@@ -26,8 +26,6 @@ import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoordinatedStateException;
import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.quotas.MasterQuotaManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Helper to synchronously wait on conditions.
@@ -50,7 +50,7 @@ import org.apache.yetus.audience.InterfaceStability;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class ProcedureSyncWait {
- private static final Log LOG = LogFactory.getLog(ProcedureSyncWait.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureSyncWait.class);
private ProcedureSyncWait() {}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
index 045c416ff5..72e0846cef 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
@@ -25,8 +25,6 @@ import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.master.ServerListener;
import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.ipc.RemoteException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionR
public class RSProcedureDispatcher
extends RemoteProcedureDispatcher
implements ServerListener {
- private static final Log LOG = LogFactory.getLog(RSProcedureDispatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RSProcedureDispatcher.class);
public static final String RS_RPC_STARTUP_WAIT_TIME_CONF_KEY =
"hbase.regionserver.rpc.startup.waittime";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
index f1174d42ea..90dfff043a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RecoverMetaState;
@@ -51,7 +50,7 @@ import com.google.common.base.Preconditions;
public class RecoverMetaProcedure
extends StateMachineProcedure
implements TableProcedureInterface {
- private static final Log LOG = LogFactory.getLog(RecoverMetaProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RecoverMetaProcedure.class);
private ServerName failedMetaServer;
private boolean shouldSplitWal;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
index 2cf558437a..9aa5171786 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
@@ -25,8 +25,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -49,7 +47,8 @@ import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
public class RestoreSnapshotProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(RestoreSnapshotProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RestoreSnapshotProcedure.class);
private TableDescriptor modifiedTableDescriptor;
private List regionsToRestore = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
index 0e37c1174b..65003e6b07 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Handle crashed server. This is a port to ProcedureV2 of what used to be euphemistically called
@@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.S
public class ServerCrashProcedure
extends StateMachineProcedure
implements ServerProcedureInterface {
- private static final Log LOG = LogFactory.getLog(ServerCrashProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ServerCrashProcedure.class);
/**
* Name of the crashed server to process.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
index dce3b41419..541fb8e1ae 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
@@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.util.ModifyRegionUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.T
@InterfaceAudience.Private
public class TruncateTableProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(TruncateTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TruncateTableProcedure.class);
private boolean preserveSplits;
private List regions;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
index 84c154f8bf..ee5afd7d1e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
@@ -23,8 +23,6 @@ import java.util.List;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
@@ -39,7 +37,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
/**
@@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class DisabledTableSnapshotHandler extends TakeSnapshotHandler {
- private static final Log LOG = LogFactory.getLog(DisabledTableSnapshotHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DisabledTableSnapshotHandler.class);
/**
* @param snapshot descriptor of the snapshot to take
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
index 399a1274e4..db2b9c888e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
@@ -22,8 +22,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
@@ -34,7 +32,8 @@ import org.apache.hadoop.hbase.procedure.ProcedureCoordinator;
import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
@@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
public class EnabledTableSnapshotHandler extends TakeSnapshotHandler {
- private static final Log LOG = LogFactory.getLog(EnabledTableSnapshotHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EnabledTableSnapshotHandler.class);
private final ProcedureCoordinator coordinator;
public EnabledTableSnapshotHandler(SnapshotDescription snapshot, MasterServices master,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
index b698082f74..1e1a3c421d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
@@ -22,8 +22,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -77,7 +76,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
@InterfaceStability.Unstable
public final class MasterSnapshotVerifier {
- private static final Log LOG = LogFactory.getLog(MasterSnapshotVerifier.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterSnapshotVerifier.class);
private SnapshotDescription snapshot;
private FileSystem fs;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java
index 56d1319c94..4bc06f5314 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java
@@ -31,10 +31,10 @@ import java.util.concurrent.locks.ReentrantLock;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -85,7 +85,7 @@ public class SnapshotFileCache implements Stoppable {
Collection filesUnderSnapshot(final Path snapshotDir) throws IOException;
}
- private static final Log LOG = LogFactory.getLog(SnapshotFileCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotFileCache.class);
private volatile boolean stop = false;
private final FileSystem fs;
private final SnapshotFileInspector fileInspector;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
index f3ca993576..a8475f0f6e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
@@ -22,10 +22,10 @@ import java.util.Collection;
import java.util.Collections;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
@InterfaceStability.Evolving
public class SnapshotHFileCleaner extends BaseHFileCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(SnapshotHFileCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotHFileCleaner.class);
/**
* Conf key for the frequency to attempt to refresh the cache of hfiles currently used in
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
index 20a4f39935..3870601db4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
@@ -29,8 +29,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -83,7 +81,8 @@ import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
@@ -102,7 +101,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
@InterfaceStability.Unstable
public class SnapshotManager extends MasterProcedureManager implements Stoppable {
- private static final Log LOG = LogFactory.getLog(SnapshotManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotManager.class);
/** By default, check to see if the snapshot is complete every WAKE MILLIS (ms) */
private static final int SNAPSHOT_WAKE_MILLIS_DEFAULT = 500;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
index 808cab5b35..9b077d15e6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
@@ -25,8 +25,6 @@ import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -56,7 +54,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
/**
@@ -69,7 +68,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
public abstract class TakeSnapshotHandler extends EventHandler implements SnapshotSentinel,
ForeignExceptionSnare {
- private static final Log LOG = LogFactory.getLog(TakeSnapshotHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TakeSnapshotHandler.class);
private volatile boolean finished;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
index 447629b550..01c195a4d3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -52,6 +50,8 @@ import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Compact passed set of files in the mob-enabled column family.
@@ -59,7 +59,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class DefaultMobStoreCompactor extends DefaultCompactor {
- private static final Log LOG = LogFactory.getLog(DefaultMobStoreCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultMobStoreCompactor.class);
private long mobSizeThreshold;
private HMobStore mobStore;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
index 27809c4e00..a932dad708 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
@@ -24,8 +24,6 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -46,6 +44,8 @@ import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An implementation of the StoreFlusher. It extends the DefaultStoreFlusher.
@@ -65,7 +65,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class DefaultMobStoreFlusher extends DefaultStoreFlusher {
- private static final Log LOG = LogFactory.getLog(DefaultMobStoreFlusher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultMobStoreFlusher.class);
private final Object flushLock = new Object();
private long mobCellValueSizeThreshold = 0;
private Path targetPath;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
index 3924ee6483..053cba641f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mob;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -29,6 +27,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Connection;
@@ -47,7 +47,7 @@ import org.apache.hadoop.util.ToolRunner;
@InterfaceAudience.Private
public class ExpiredMobFileCleaner extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(ExpiredMobFileCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExpiredMobFileCleaner.class);
/**
* Cleans the MOB files when they're expired and their min versions are 0.
* If the latest timestamp of Cells in a MOB file is older than the TTL in the column family,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java
index 813de8ce11..13caee600f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java
@@ -31,12 +31,12 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.IdLock;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
@InterfaceAudience.Private
public class MobFileCache {
- private static final Log LOG = LogFactory.getLog(MobFileCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MobFileCache.class);
/*
* Eviction and statistics thread. Periodically run to print the statistics and
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
index f77df4c21c..584d10bb11 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
@@ -36,8 +36,6 @@ import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -79,6 +77,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The mob utilities
@@ -86,7 +86,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public final class MobUtils {
- private static final Log LOG = LogFactory.getLog(MobUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MobUtils.class);
private final static long WEEKLY_THRESHOLD_MULTIPLIER = 7;
private final static long MONTHLY_THRESHOLD_MULTIPLIER = 4 * WEEKLY_THRESHOLD_MULTIPLIER;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
index 0cccfa30cc..b09d7e9fd6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
@@ -35,13 +35,12 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
+import java.util.Objects;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -49,7 +48,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
@@ -89,6 +87,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An implementation of {@link MobCompactor} that compacts the mob files in partitions.
@@ -96,7 +96,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class PartitionedMobCompactor extends MobCompactor {
- private static final Log LOG = LogFactory.getLog(PartitionedMobCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PartitionedMobCompactor.class);
protected long mergeableSize;
protected int delFileMaxCount;
/** The number of files compacted in a batch */
@@ -361,7 +361,7 @@ public class PartitionedMobCompactor extends MobCompactor {
LOG.info(
"After a mob compaction with all files selected, archiving the del files ");
for (CompactionDelPartition delPartition : request.getDelPartitions()) {
- LOG.info(delPartition.listDelFiles());
+ LOG.info(Objects.toString(delPartition.listDelFiles()));
try {
MobUtils.removeMobFiles(conf, fs, tableName, mobTableDir, column.getName(),
delPartition.getStoreFiles());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
index 837ddf007d..61d0007561 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
@@ -28,11 +28,11 @@ import java.util.Iterator;
import java.util.List;
import org.apache.commons.collections4.queue.CircularFifoQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
@InterfaceAudience.Private
public class TaskMonitor {
- private static final Log LOG = LogFactory.getLog(TaskMonitor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TaskMonitor.class);
public static final String MAX_TASKS_KEY = "hbase.taskmonitor.max.tasks";
public static final int DEFAULT_MAX_TASKS = 1000;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java
index efe2c1e957..0a74b093a8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.namespace;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.quotas.QuotaExceededException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The Class NamespaceAuditor performs checks to ensure operations like table creation
@@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class NamespaceAuditor {
- private static final Log LOG = LogFactory.getLog(NamespaceAuditor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NamespaceAuditor.class);
private NamespaceStateManager stateManager;
private MasterServices masterServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java
index c62594adc8..3cf5a25723 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java
@@ -22,8 +22,6 @@ import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
@@ -33,6 +31,8 @@ import org.apache.hadoop.hbase.master.TableNamespaceManager;
import org.apache.hadoop.hbase.quotas.QuotaExceededException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* NamespaceStateManager manages state (in terms of quota) of all the namespaces. It contains
@@ -41,7 +41,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
class NamespaceStateManager {
- private static final Log LOG = LogFactory.getLog(NamespaceStateManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NamespaceStateManager.class);
private ConcurrentMap nsStateCache;
private MasterServices master;
private volatile boolean initialized = false;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java
index 1e12304b76..15e882b81d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java
@@ -25,9 +25,9 @@ import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener;
@@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
@InterfaceAudience.Private
public class Procedure implements Callable, ForeignExceptionListener {
- private static final Log LOG = LogFactory.getLog(Procedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Procedure.class);
//
// Arguments and naming
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
index 2f67d41bee..c9f068b632 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
@@ -29,9 +29,9 @@ import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.DaemonThreadFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
*/
@InterfaceAudience.Private
public class ProcedureCoordinator {
- private static final Log LOG = LogFactory.getLog(ProcedureCoordinator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureCoordinator.class);
final static long KEEP_ALIVE_MILLIS_DEFAULT = 5000;
final static long TIMEOUT_MILLIS_DEFAULT = 60000;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
index 09a2972030..af4d2d7104 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
@@ -23,11 +23,11 @@ import java.util.HashSet;
import java.util.Set;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides the common setup framework and runtime services for globally
@@ -44,7 +44,7 @@ public abstract class ProcedureManagerHost {
public static final String MASTER_PROCEDURE_CONF_KEY =
"hbase.procedure.master.classes";
- private static final Log LOG = LogFactory.getLog(ProcedureManagerHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureManagerHost.class);
protected Set procedures = new HashSet<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java
index c8399ba4bd..86923ae16e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java
@@ -26,9 +26,9 @@ import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.DaemonThreadFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
*/
@InterfaceAudience.Private
public class ProcedureMember implements Closeable {
- private static final Log LOG = LogFactory.getLog(ProcedureMember.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureMember.class);
final static long KEEP_ALIVE_MILLIS_DEFAULT = 5000;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java
index 0f4ea64586..5cb2529f51 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.hbase.procedure;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.procedure.flush.RegionServerFlushTableProcedureManager;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides the globally barriered procedure framework and environment
@@ -36,8 +36,8 @@ import org.apache.zookeeper.KeeperException;
public class RegionServerProcedureManagerHost extends
ProcedureManagerHost {
- private static final Log LOG = LogFactory
- .getLog(RegionServerProcedureManagerHost.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(RegionServerProcedureManagerHost.class);
public void initialize(RegionServerServices rss) throws KeeperException {
for (RegionServerProcedureManager proc : procedures) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
index 892733828e..6416e6a65b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
@@ -21,14 +21,14 @@ import java.io.IOException;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
import org.apache.hadoop.hbase.errorhandling.TimeoutExceptionInjector;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Distributed procedure member's Subprocedure. A procedure is sarted on a ProcedureCoordinator
@@ -51,7 +51,7 @@ import org.apache.zookeeper.KeeperException;
* barrierName. (ex: snapshot121126).
*/
abstract public class Subprocedure implements Callable {
- private static final Log LOG = LogFactory.getLog(Subprocedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Subprocedure.class);
// Name of the procedure
final private String barrierName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java
index 609ce8ee3b..c1fb8f5c1a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java
@@ -22,8 +22,6 @@ import java.io.InterruptedIOException;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
@@ -31,13 +29,15 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* ZooKeeper based {@link ProcedureCoordinatorRpcs} for a {@link ProcedureCoordinator}
*/
@InterfaceAudience.Private
public class ZKProcedureCoordinator implements ProcedureCoordinatorRpcs {
- private static final Log LOG = LogFactory.getLog(ZKProcedureCoordinator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKProcedureCoordinator.class);
private ZKProcedureUtil zkProc = null;
protected ProcedureCoordinator coordinator = null; // if started this should be non-null
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
index 45e6760967..ea41ae8972 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
@@ -21,8 +21,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
@@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* ZooKeeper based controller for a procedure member.
@@ -52,7 +52,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class ZKProcedureMemberRpcs implements ProcedureMemberRpcs {
- private static final Log LOG = LogFactory.getLog(ZKProcedureMemberRpcs.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKProcedureMemberRpcs.class);
private final ZKProcedureUtil zkController;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
index 0349290eab..976e36b49b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
@@ -21,14 +21,14 @@ import java.io.Closeable;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This is a shared ZooKeeper-based znode management utils for distributed procedure. All znode
@@ -52,7 +52,7 @@ import org.apache.zookeeper.KeeperException;
public abstract class ZKProcedureUtil
extends ZKListener implements Closeable {
- private static final Log LOG = LogFactory.getLog(ZKProcedureUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKProcedureUtil.class);
public static final String ACQUIRED_BARRIER_ZNODE_DEFAULT = "acquired";
public static final String REACHED_BARRIER_ZNODE_DEFAULT = "reached";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java
index 1b4c561c23..5c005a75a9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.procedure.flush;
import java.util.List;
import java.util.concurrent.Callable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.procedure.ProcedureMember;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
*/
@InterfaceAudience.Private
public class FlushTableSubprocedure extends Subprocedure {
- private static final Log LOG = LogFactory.getLog(FlushTableSubprocedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushTableSubprocedure.class);
private final String table;
private final List regions;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
index 66f9240f7b..55d73d87e7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
@@ -25,8 +25,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -47,7 +45,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
@@ -65,7 +64,7 @@ public class MasterFlushTableProcedureManager extends MasterProcedureManager {
"hbase.flush.procedure.master.threads";
private static final int FLUSH_PROC_POOL_THREADS_DEFAULT = 1;
- private static final Log LOG = LogFactory.getLog(MasterFlushTableProcedureManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterFlushTableProcedureManager.class);
private MasterServices master;
private ProcedureCoordinator coordinator;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java
index d328561ce2..508f0cc2f6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java
@@ -29,8 +29,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
@@ -51,13 +49,15 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This manager class handles flushing of the regions for table on a {@link HRegionServer}.
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class RegionServerFlushTableProcedureManager extends RegionServerProcedureManager {
- private static final Log LOG = LogFactory.getLog(RegionServerFlushTableProcedureManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionServerFlushTableProcedureManager.class);
private static final String CONCURENT_FLUSH_TASKS_KEY =
"hbase.flush.procedure.region.concurrentTasks";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
index 38fc488e48..a15aeb6852 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
@@ -21,17 +21,17 @@ package org.apache.hadoop.hbase.quotas;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Result;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class DefaultOperationQuota implements OperationQuota {
- private static final Log LOG = LogFactory.getLog(DefaultOperationQuota.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultOperationQuota.class);
private final List limiters;
private long writeAvailable = 0;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java
index a76e9c1937..eded076472 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java
@@ -23,8 +23,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -34,13 +32,15 @@ import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A chore which computes the size of each {@link HRegion} on the FileSystem hosted by the given {@link HRegionServer}.
*/
@InterfaceAudience.Private
public class FileSystemUtilizationChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(FileSystemUtilizationChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FileSystemUtilizationChore.class);
static final String FS_UTILIZATION_CHORE_PERIOD_KEY = "hbase.regionserver.quotas.fs.utilization.chore.period";
static final int FS_UTILIZATION_CHORE_PERIOD_DEFAULT = 1000 * 60 * 5; // 5 minutes in millis
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
index e4fa3eaa1d..79be1ac6bc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
@@ -28,8 +28,6 @@ import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.lang3.builder.HashCodeBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.namespace.NamespaceAuditor;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRe
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class MasterQuotaManager implements RegionStateListener {
- private static final Log LOG = LogFactory.getLog(MasterQuotaManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterQuotaManager.class);
private static final Map EMPTY_MAP = Collections.unmodifiableMap(
new HashMap<>());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java
index 6a5e38c5e5..adabdac6a8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java
@@ -30,14 +30,14 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -57,7 +57,7 @@ import org.apache.hadoop.security.UserGroupInformation;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class QuotaCache implements Stoppable {
- private static final Log LOG = LogFactory.getLog(QuotaCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(QuotaCache.class);
public static final String REFRESH_CONF_KEY = "hbase.quota.refresh.period";
private static final int REFRESH_DEFAULT_PERIOD = 5 * 60000; // 5min
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java
index bfbda35cbc..39048d79ee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java
@@ -26,8 +26,6 @@ import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
@@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.master.MetricsMaster;
import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
@@ -53,7 +52,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota;
*/
@InterfaceAudience.Private
public class QuotaObserverChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(QuotaObserverChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(QuotaObserverChore.class);
static final String QUOTA_OBSERVER_CHORE_PERIOD_KEY =
"hbase.master.quotas.observer.chore.period";
static final int QUOTA_OBSERVER_CHORE_PERIOD_DEFAULT = 1000 * 60 * 1; // 1 minutes in millis
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
index f3a745ca3f..6bc3ce9d47 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
@@ -23,8 +23,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
@@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class QuotaUtil extends QuotaTableUtil {
- private static final Log LOG = LogFactory.getLog(QuotaUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(QuotaUtil.class);
public static final String QUOTA_CONF_KEY = "hbase.quota.enabled";
private static final boolean QUOTA_ENABLED_DEFAULT = false;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java
index 51b8cc950b..62e06146b3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java
@@ -22,10 +22,10 @@ import java.io.IOException;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ipc.RpcScheduler;
import org.apache.hadoop.hbase.ipc.RpcServer;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class RegionServerRpcQuotaManager {
- private static final Log LOG = LogFactory.getLog(RegionServerRpcQuotaManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionServerRpcQuotaManager.class);
private final RegionServerServices rsServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java
index 80bbdc3626..0a998dc469 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java
@@ -24,10 +24,10 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class RegionServerSpaceQuotaManager {
- private static final Log LOG = LogFactory.getLog(RegionServerSpaceQuotaManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionServerSpaceQuotaManager.class);
private final RegionServerServices rsServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java
index 4f2efc7e37..78bbf755e8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java
@@ -32,8 +32,6 @@ import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.commons.lang3.builder.HashCodeBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;
@@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
*/
@InterfaceAudience.Private
public class SnapshotQuotaObserverChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(SnapshotQuotaObserverChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotQuotaObserverChore.class);
static final String SNAPSHOT_QUOTA_CHORE_PERIOD_KEY =
"hbase.master.quotas.snapshot.chore.period";
static final int SNAPSHOT_QUOTA_CHORE_PERIOD_DEFAULT = 1000 * 60 * 5; // 5 minutes in millis
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java
index bdacd334a8..526f2e8602 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java
@@ -17,9 +17,9 @@
package org.apache.hadoop.hbase.quotas;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An Exception that is thrown when a space quota is in violation.
@@ -27,7 +27,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Public
public class SpaceLimitingException extends QuotaExceededException {
private static final long serialVersionUID = 2319438922387583600L;
- private static final Log LOG = LogFactory.getLog(SpaceLimitingException.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SpaceLimitingException.class);
private static final String MESSAGE_PREFIX = SpaceLimitingException.class.getName() + ": ";
private final String policyName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java
index e86e9ceecb..d3be620877 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java
@@ -22,12 +22,12 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@InterfaceAudience.Private
public class SpaceQuotaRefresherChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(SpaceQuotaRefresherChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SpaceQuotaRefresherChore.class);
static final String POLICY_REFRESHER_CHORE_PERIOD_KEY =
"hbase.regionserver.quotas.policy.refresher.chore.period";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java
index dfaabec026..664e26848a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java
@@ -24,8 +24,6 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.TableName;
@@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
@@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota;
*/
@InterfaceAudience.Private
public class TableQuotaSnapshotStore implements QuotaSnapshotStore {
- private static final Log LOG = LogFactory.getLog(TableQuotaSnapshotStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableQuotaSnapshotStore.class);
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
private final ReadLock rlock = lock.readLock();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java
index f9813e55f4..d81d7d304a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java
@@ -18,18 +18,18 @@ package org.apache.hadoop.hbase.quotas;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A {@link SpaceQuotaSnapshotNotifier} which uses the hbase:quota table.
*/
public class TableSpaceQuotaSnapshotNotifier implements SpaceQuotaSnapshotNotifier {
- private static final Log LOG = LogFactory.getLog(TableSpaceQuotaSnapshotNotifier.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableSpaceQuotaSnapshotNotifier.class);
private Connection conn;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java
index 806cc763f6..183eac9e95 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java
@@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.quotas.policies;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.quotas.SpaceLimitingException;
import org.apache.hadoop.hbase.quotas.SpaceViolationPolicy;
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
*/
@InterfaceAudience.Private
public class DisableTableViolationPolicyEnforcement extends DefaultViolationPolicyEnforcement {
- private static final Log LOG = LogFactory.getLog(DisableTableViolationPolicyEnforcement.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DisableTableViolationPolicyEnforcement.class);
@Override
public void enable() throws IOException {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java
index 5d5af2f38d..66dfee950b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java
@@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.quotas.policies;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.quotas.SpaceViolationPolicy;
import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
@InterfaceAudience.Private
public class NoWritesCompactionsViolationPolicyEnforcement
extends NoWritesViolationPolicyEnforcement {
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
NoWritesCompactionsViolationPolicyEnforcement.class);
private AtomicBoolean disableCompactions = new AtomicBoolean(false);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
index 4614935cfc..bf150a4c1d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
@@ -24,12 +24,12 @@ import java.util.List;
import java.util.NavigableSet;
import java.util.SortedSet;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
import org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@@ -193,7 +193,7 @@ public abstract class AbstractMemStore implements MemStore {
return conf;
}
- protected void dump(Log log) {
+ protected void dump(Logger log) {
active.dump(log);
snapshot.dump(log);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java
index 76840b70b4..2fdab81313 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java
@@ -22,10 +22,10 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.CellSink;
/**
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.CellSink;
@InterfaceAudience.Private
public abstract class AbstractMultiFileWriter implements CellSink, ShipperListener {
- private static final Log LOG = LogFactory.getLog(AbstractMultiFileWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractMultiFileWriter.class);
/** Factory that is used to produce single StoreFile.Writer-s */
protected WriterFactory writerFactory;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java
index ac0379bf96..4eb3419ad8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java
@@ -21,11 +21,11 @@ import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.PriorityFunction;
import org.apache.hadoop.hbase.ipc.QosPriority;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
@@ -67,8 +67,8 @@ import org.apache.hadoop.hbase.security.User;
//to figure out whether it is a meta region or not.
@InterfaceAudience.Private
public class AnnotationReadingPriorityFunction implements PriorityFunction {
- private static final Log LOG =
- LogFactory.getLog(AnnotationReadingPriorityFunction.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(AnnotationReadingPriorityFunction.class.getName());
/** Used to control the scan delay, currently sqrt(numNextCall * weight) */
public static final String SCAN_VTIME_WEIGHT_CONF_KEY = "hbase.ipc.server.scan.vtime.weight";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java
index 0b251153df..d51d29441e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java
@@ -18,11 +18,11 @@
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
/**
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class BusyRegionSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy {
- private static final Log LOG = LogFactory.getLog(BusyRegionSplitPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BusyRegionSplitPolicy.class);
// Maximum fraction blocked write requests before region is considered for split
private float maxBlockedRequests;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java
index fe510ae457..17e64b0678 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.Cell;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.Comparator;
@@ -28,8 +30,6 @@ import java.util.Map;
import java.util.NavigableSet;
import java.util.NavigableMap;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
/**
@@ -44,7 +44,7 @@ import org.apache.commons.logging.LogFactory;
*/
@InterfaceAudience.Private
public abstract class CellFlatMap implements NavigableMap<Cell, Cell> {
- private static final Log LOG = LogFactory.getLog(CellFlatMap.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CellFlatMap.class);
private final Comparator<? super Cell> comparator;
protected int minCellIdx = 0; // the index of the minimal cell (for sub-sets)
protected int maxCellIdx = 0; // the index of the cell after the maximal cell (for sub-sets)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
index faf517bac5..a0cad8ac8c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
@@ -30,9 +30,9 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.HeapMemoryTuneObserver;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.StringUtils;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
*/
@InterfaceAudience.Private
public class ChunkCreator {
- private static final Log LOG = LogFactory.getLog(ChunkCreator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ChunkCreator.class);
// monotonically increasing chunkid
private AtomicInteger chunkID = new AtomicInteger(1);
// maps the chunk against the monotonically increasing chunk id. We need to preserve the
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
index a8459da395..e143511425 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
@@ -37,8 +37,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.IntSupplier;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.conf.ConfigurationManager;
import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
@@ -55,7 +53,8 @@ import org.apache.hadoop.hbase.util.StealJobQueue;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@@ -64,7 +63,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
*/
@InterfaceAudience.Private
public class CompactSplit implements CompactionRequester, PropagatingConfigurationObserver {
- private static final Log LOG = LogFactory.getLog(CompactSplit.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactSplit.class);
// Configuration key for the large compaction threads.
public final static String LARGE_COMPACTION_THREADS =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java
index 8a0dee67e8..3074dad53d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java
@@ -19,14 +19,13 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.executor.EventType;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -36,7 +35,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class CompactedHFilesDischarger extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(CompactedHFilesDischarger.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactedHFilesDischarger.class);
private RegionServerServices regionServerServices;
// Default is to use executor
@VisibleForTesting
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
index f1232f84f2..7b885ff1af 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
@@ -26,14 +26,14 @@ import java.util.List;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MemoryCompactionPolicy;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -69,7 +69,7 @@ public class CompactingMemStore extends AbstractMemStore {
"hbase.memstore.inmemoryflush.threshold.factor";
private static final double IN_MEMORY_FLUSH_THRESHOLD_FACTOR_DEFAULT = 0.02;
- private static final Log LOG = LogFactory.getLog(CompactingMemStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactingMemStore.class);
private HStore store;
private RegionServicesForStores regionServices;
private CompactionPipeline pipeline;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java
index 2f479e9016..49abe72fa3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java
@@ -23,9 +23,9 @@ import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.util.ClassSize;
*/
@InterfaceAudience.Private
public class CompactionPipeline {
- private static final Log LOG = LogFactory.getLog(CompactionPipeline.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactionPipeline.class);
public final static long FIXED_OVERHEAD = ClassSize
.align(ClassSize.OBJECT + (3 * ClassSize.REFERENCE) + Bytes.SIZEOF_LONG);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
index 93658193c1..5e8a8b3f1b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
@@ -24,12 +24,11 @@ import java.util.LinkedList;
import java.util.List;
import java.util.SortedSet;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -281,7 +280,7 @@ public class CompositeImmutableSegment extends ImmutableSegment {
/**
* Dumps all cells of the segment into the given log
*/
- void dump(Log log) {
+ void dump(Logger log) {
for (ImmutableSegment s : segments) {
s.dump(log);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java
index 13c344150c..2ff7d58b82 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java
@@ -24,9 +24,9 @@ import static org.apache.hadoop.hbase.HConstants.HFILE_BLOCK_CACHE_SIZE_KEY;
import static org.apache.hadoop.hbase.regionserver.HeapMemoryManager.MEMSTORE_SIZE_MAX_RANGE_KEY;
import static org.apache.hadoop.hbase.regionserver.HeapMemoryManager.MEMSTORE_SIZE_MIN_RANGE_KEY;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
@@ -93,7 +93,7 @@ class DefaultHeapMemoryTuner implements HeapMemoryTuner {
// NEUTRAL(given that last tuner period was also NEUTRAL).
private static final double TUNER_STEP_EPS = 1e-6;
- private Log LOG = LogFactory.getLog(DefaultHeapMemoryTuner.class);
+ private Logger LOG = LoggerFactory.getLogger(DefaultHeapMemoryTuner.class);
private TunerResult TUNER_RESULT = new TunerResult(true);
private Configuration conf;
private float sufficientMemoryLevel = DEFAULT_SUFFICIENT_MEMORY_LEVEL_VALUE;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
index 0e0276a1f4..061e4d0733 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
@@ -24,8 +24,6 @@ import java.lang.management.RuntimeMXBean;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
@@ -33,6 +31,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
*/
@InterfaceAudience.Private
public class DefaultMemStore extends AbstractMemStore {
- private static final Log LOG = LogFactory.getLog(DefaultMemStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultMemStore.class);
public final static long DEEP_OVERHEAD = ClassSize.align(AbstractMemStore.DEEP_OVERHEAD);
public final static long FIXED_OVERHEAD = ClassSize.align(AbstractMemStore.FIXED_OVERHEAD);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
index e1f31bbe44..47d22b50d8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
@@ -26,15 +26,14 @@ import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
@@ -44,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
*/
@InterfaceAudience.Private
class DefaultStoreFileManager implements StoreFileManager {
- private static final Log LOG = LogFactory.getLog(DefaultStoreFileManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultStoreFileManager.class);
private final CellComparator cellComparator;
private final CompactionConfiguration comConf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
index d666ba9e95..b3f0a44dc8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
@@ -22,9 +22,9 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
@@ -36,7 +36,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Private
public class DefaultStoreFlusher extends StoreFlusher {
- private static final Log LOG = LogFactory.getLog(DefaultStoreFlusher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultStoreFlusher.class);
private final Object flushLock = new Object();
public DefaultStoreFlusher(Configuration conf, HStore store) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java
index 51790140aa..483c155f83 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -41,8 +41,8 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public class DelimitedKeyPrefixRegionSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy {
- private static final Log LOG = LogFactory
- .getLog(DelimitedKeyPrefixRegionSplitPolicy.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(DelimitedKeyPrefixRegionSplitPolicy.class);
public static final String DELIMITER_KEY = "DelimitedKeyPrefixRegionSplitPolicy.delimiter";
private byte[] delimiter = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java
index e4476d040b..0f0117899f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java
@@ -21,10 +21,10 @@ import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A {@link FlushPolicy} that only flushes store larger a given threshold. If no store is large
@@ -33,7 +33,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class FlushAllLargeStoresPolicy extends FlushLargeStoresPolicy {
- private static final Log LOG = LogFactory.getLog(FlushAllLargeStoresPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushAllLargeStoresPolicy.class);
@Override
protected void configureForRegion(HRegion region) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java
index 1610fd882f..74bde60397 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java
@@ -17,10 +17,10 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A {@link FlushPolicy} that only flushes store larger a given threshold. If no store is large
@@ -29,7 +29,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public abstract class FlushLargeStoresPolicy extends FlushPolicy {
- private static final Log LOG = LogFactory.getLog(FlushLargeStoresPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushLargeStoresPolicy.class);
public static final String HREGION_COLUMNFAMILY_FLUSH_SIZE_LOWER_BOUND =
"hbase.hregion.percolumnfamilyflush.size.lower.bound";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java
index 2f273cab6c..59f925fecf 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java
@@ -19,12 +19,12 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.util.ReflectionUtils;
@@ -37,7 +37,7 @@ import org.apache.hadoop.util.ReflectionUtils;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class FlushPolicyFactory {
- private static final Log LOG = LogFactory.getLog(FlushPolicyFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushPolicyFactory.class);
public static final String HBASE_FLUSH_POLICY_KEY = "hbase.regionserver.flush.policy";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
index 5db7383ab4..0e4fd808cb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
@@ -27,8 +27,6 @@ import java.util.NavigableSet;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -58,6 +56,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.util.IdLock;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The store implementation to save MOBs (medium objects), it extends the HStore.
@@ -77,7 +77,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class HMobStore extends HStore {
- private static final Log LOG = LogFactory.getLog(HMobStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HMobStore.class);
private MobCacheConfig mobCacheConfig;
private Path homePath;
private Path mobFamilyPath;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 8ca11848e7..74d71d3f89 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -70,8 +70,6 @@ import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -198,6 +196,8 @@ import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.Nullable;
@@ -219,7 +219,7 @@ import edu.umd.cs.findbugs.annotations.Nullable;
@SuppressWarnings("deprecation")
@InterfaceAudience.Private
public class HRegion implements HeapSize, PropagatingConfigurationObserver, Region {
- private static final Log LOG = LogFactory.getLog(HRegion.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegion.class);
public static final String LOAD_CFS_ON_DEMAND_CONFIG_KEY =
"hbase.hregion.scan.loadColumnFamiliesOnDemand";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
index 4fc9ffe5cd..4788ac95c6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
@@ -28,8 +28,6 @@ import java.util.List;
import java.util.Optional;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -54,7 +52,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import edu.umd.cs.findbugs.annotations.Nullable;
@@ -65,7 +64,7 @@ import edu.umd.cs.findbugs.annotations.Nullable;
*/
@InterfaceAudience.Private
public class HRegionFileSystem {
- private static final Log LOG = LogFactory.getLog(HRegionFileSystem.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegionFileSystem.class);
/** Name of the region info file that resides just under the region directory. */
public final static String REGION_INFO_FILE = ".regioninfo";
@@ -820,7 +819,7 @@ public class HRegionFileSystem {
* @param LOG log to output information
* @throws IOException if an unexpected exception occurs
*/
- void logFileSystemState(final Log LOG) throws IOException {
+ void logFileSystemState(final Logger LOG) throws IOException {
FSUtils.logFileSystemState(fs, this.getRegionDir(), LOG);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index e2d6ba061b..7f3cb50664 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -53,8 +53,6 @@ import java.util.function.Function;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.SystemUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -172,7 +170,8 @@ import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
@@ -224,7 +223,7 @@ public class HRegionServer extends HasThread implements
// Time to pause if master says 'please hold'. Make configurable if needed.
private static final int INIT_PAUSE_TIME_MS = 1000;
- private static final Log LOG = LogFactory.getLog(HRegionServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegionServer.class);
/**
* For testing only! Set to true to skip notifying region assignment to master .
@@ -2373,15 +2372,15 @@ public class HRegionServer extends HasThread implements
public void abort(String reason, Throwable cause) {
String msg = "***** ABORTING region server " + this + ": " + reason + " *****";
if (cause != null) {
- LOG.fatal(msg, cause);
+ LOG.error(msg, cause);
} else {
- LOG.fatal(msg);
+ LOG.error(msg);
}
this.abortRequested = true;
// HBASE-4014: show list of coprocessors that were loaded to help debug
// regionserver crashes.Note that we're implicitly using
// java.util.HashSet's toString() method to print the coprocessor names.
- LOG.fatal("RegionServer abort: loaded coprocessors are: " +
+ LOG.error("RegionServer abort: loaded coprocessors are: " +
CoprocessorHost.getLoadedCoprocessors());
// Try and dump metrics if abort -- might give clue as to how fatal came about....
try {
@@ -2631,7 +2630,7 @@ public class HRegionServer extends HasThread implements
} catch (ServiceException se) {
IOException ioe = ProtobufUtil.getRemoteException(se);
if (ioe instanceof ClockOutOfSyncException) {
- LOG.fatal("Master rejected startup because clock is out of sync", ioe);
+ LOG.error("Master rejected startup because clock is out of sync", ioe);
// Re-throw IOE will cause RS to abort
throw ioe;
} else if (ioe instanceof ServerNotRunningYetException) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
index c2e1111e19..d3509c2dbd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
@@ -18,11 +18,10 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LocalHBaseCluster;
@@ -34,7 +33,7 @@ import org.apache.hadoop.hbase.util.ServerCommandLine;
*/
@InterfaceAudience.Private
public class HRegionServerCommandLine extends ServerCommandLine {
- private static final Log LOG = LogFactory.getLog(HRegionServerCommandLine.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegionServerCommandLine.class);
private final Class<? extends HRegionServer> regionServerClass;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 80f91c8b68..9ba449dc2f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -48,8 +48,7 @@ import java.util.function.Predicate;
import java.util.function.ToLongFunction;
import java.util.stream.Collectors;
import java.util.stream.LongStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -98,7 +97,8 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection;
@@ -133,7 +133,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
public static final int DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER = 1000;
public static final int DEFAULT_BLOCKING_STOREFILE_COUNT = 10;
- private static final Log LOG = LogFactory.getLog(HStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HStore.class);
protected final MemStore memstore;
// This stores directory in the filesystem.
@@ -2221,7 +2221,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
try {
sf.deleteStoreFile();
} catch (IOException deleteEx) {
- LOG.fatal("Failed to delete store file we committed, halting " + pathToDelete, ex);
+ LOG.error("Failed to delete store file we committed, halting " + pathToDelete, ex);
Runtime.getRuntime().halt(1);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
index b405c86aab..93e59cfe1f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
@@ -26,8 +26,6 @@ import java.util.OptionalLong;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -60,7 +59,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public class HStoreFile implements StoreFile {
- private static final Log LOG = LogFactory.getLog(HStoreFile.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(HStoreFile.class.getName());
public static final String STORE_FILE_READER_NO_READAHEAD = "hbase.store.reader.no-readahead";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java
index cfdb32dd8d..abd9b46133 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java
@@ -25,14 +25,14 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Server;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class HeapMemoryManager {
- private static final Log LOG = LogFactory.getLog(HeapMemoryManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HeapMemoryManager.class);
private static final int CONVERT_TO_PERCENTAGE = 100;
private static final int CLUSTER_MINIMUM_MEMORY_THRESHOLD =
(int) (CONVERT_TO_PERCENTAGE * HConstants.HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
index 3164e1c83f..3043478556 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.procedure2.util.StringUtils;
@InterfaceAudience.Private
public class IncreasingToUpperBoundRegionSplitPolicy extends ConstantSizeRegionSplitPolicy {
- private static final Log LOG = LogFactory.getLog(IncreasingToUpperBoundRegionSplitPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IncreasingToUpperBoundRegionSplitPolicy.class);
protected long initialSize;
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java
index 634bd88e0c..660da57080 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java
@@ -19,9 +19,9 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A custom RegionSplitPolicy implementing a SplitPolicy that groups
@@ -32,8 +32,8 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class KeyPrefixRegionSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy {
- private static final Log LOG = LogFactory
- .getLog(KeyPrefixRegionSplitPolicy.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(KeyPrefixRegionSplitPolicy.class);
@Deprecated
public static final String PREFIX_LENGTH_KEY_DEPRECATED = "prefix_split_key_policy.prefix_length";
public static final String PREFIX_LENGTH_KEY = "KeyPrefixRegionSplitPolicy.prefix_length";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
index f26575d6c3..cdd92a6e10 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
@@ -27,11 +27,11 @@ import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
/**
@@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
@InterfaceAudience.Private
public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner
implements KeyValueScanner, InternalScanner {
- private static final Log LOG = LogFactory.getLog(KeyValueHeap.class);
+ private static final Logger LOG = LoggerFactory.getLogger(KeyValueHeap.class);
protected PriorityQueue<KeyValueScanner> heap = null;
// Holds the scanners when a ever a eager close() happens. All such eagerly closed
// scans are collected and when the final scanner.close() happens will perform the
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java
index a91a27156a..75d6076b18 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java
@@ -18,9 +18,9 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.HasThread;
@@ -54,7 +54,7 @@ import java.io.IOException;
*/
@InterfaceAudience.Private
public class Leases extends HasThread {
- private static final Log LOG = LogFactory.getLog(Leases.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(Leases.class.getName());
public static final int MIN_WAIT_TIME = 100;
private final Map<String, Lease> leases = new ConcurrentHashMap<>();
@@ -98,7 +98,7 @@ public class Leases extends HasThread {
} catch (ConcurrentModificationException e) {
continue;
} catch (Throwable e) {
- LOG.fatal("Unexpected exception killed leases thread", e);
+ LOG.error("Unexpected exception killed leases thread", e);
break;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
index 451b886985..d4561ed895 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
@@ -25,8 +25,6 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
@@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
@VisibleForTesting
public class LogRoller extends HasThread implements Closeable {
- private static final Log LOG = LogFactory.getLog(LogRoller.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LogRoller.class);
private final ReentrantLock rollLock = new ReentrantLock();
private final AtomicBoolean rollLog = new AtomicBoolean(false);
private final ConcurrentHashMap walNeedsRoll = new ConcurrentHashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java
index b262328d72..42302b2a5c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java
@@ -18,10 +18,10 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* MemStoreCompactionStrategy is the root of a class hierarchy which defines the strategy for
@@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public abstract class MemStoreCompactionStrategy {
- protected static final Log LOG = LogFactory.getLog(MemStoreCompactionStrategy.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(MemStoreCompactionStrategy.class);
// The upper bound for the number of segments we store in the pipeline prior to merging.
public static final String COMPACTING_MEMSTORE_THRESHOLD_KEY =
"hbase.hregion.compacting.pipeline.segments.limit";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java
index 4d97411a77..6596b2025c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MemoryCompactionPolicy;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@@ -55,7 +55,7 @@ public class MemStoreCompactor {
+ ClassSize.ATOMIC_BOOLEAN // isInterrupted (the internals)
);
- private static final Log LOG = LogFactory.getLog(MemStoreCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MemStoreCompactor.class);
private CompactingMemStore compactingMemStore;
// a static version of the segment list from the pipeline
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java
index 0f96936d98..f5d72b459b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java
@@ -25,14 +25,13 @@ import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
/**
@@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
@InterfaceAudience.Private
public class MemStoreCompactorSegmentsIterator extends MemStoreSegmentsIterator {
- private static final Log LOG = LogFactory.getLog(MemStoreCompactorSegmentsIterator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MemStoreCompactorSegmentsIterator.class);
private final List kvs = new ArrayList<>();
private boolean hasMore = true;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
index a314848cab..9e352ef428 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
@@ -37,8 +37,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.ReentrantReadWriteLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HConstants;
@@ -55,6 +53,8 @@ import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Thread that flushes cache on request
@@ -67,7 +67,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
class MemStoreFlusher implements FlushRequester {
- private static final Log LOG = LogFactory.getLog(MemStoreFlusher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MemStoreFlusher.class);
private Configuration conf;
// These two data members go together. Any entry in the one must have
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java
index 08588d26e8..6a253faf36 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java
@@ -28,14 +28,13 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/**
@@ -66,7 +65,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Private
public class MemStoreLABImpl implements MemStoreLAB {
- static final Log LOG = LogFactory.getLog(MemStoreLABImpl.class);
+ static final Logger LOG = LoggerFactory.getLogger(MemStoreLABImpl.class);
private AtomicReference curChunk = new AtomicReference<>();
// Lock to manage multiple handlers requesting for a chunk
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
index f65bb66a9e..b643ecfb37 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
@@ -26,8 +26,6 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
@@ -46,6 +44,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;
import org.apache.hadoop.metrics2.MetricsExecutor;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Impl for exposing HRegionServer Information through Hadoop's metrics 2 system.
@@ -54,7 +54,7 @@ import org.apache.yetus.audience.InterfaceAudience;
class MetricsRegionServerWrapperImpl
implements MetricsRegionServerWrapper {
- private static final Log LOG = LogFactory.getLog(MetricsRegionServerWrapperImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionServerWrapperImpl.class);
private final HRegionServer regionServer;
private final MetricsWALSource metricsWALSource;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
index 7f37bbf9b2..2aa1a82dc0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
@@ -27,19 +27,19 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.metrics2.MetricsExecutor;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable {
- private static final Log LOG = LogFactory.getLog(MetricsRegionWrapperImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionWrapperImpl.class);
public static final int PERIOD = 45;
public static final String UNKNOWN = "unknown";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java
index 07ff281ce2..a25ef3b6c2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java
@@ -24,9 +24,9 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
import java.util.LinkedList;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.util.ClassSize;
*/
@InterfaceAudience.Private
public class MultiVersionConcurrencyControl {
- private static final Log LOG = LogFactory.getLog(MultiVersionConcurrencyControl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiVersionConcurrencyControl.class);
final AtomicLong readPoint = new AtomicLong(0);
final AtomicLong writePoint = new AtomicLong(0);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index a77856aaa6..ec57c59ad2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -44,8 +44,6 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
import org.apache.commons.lang3.mutable.MutableObject;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ByteBufferCell;
@@ -129,7 +127,8 @@ import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.Cache;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
@@ -234,7 +233,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe
public class RSRpcServices implements HBaseRPCErrorHandler,
AdminService.BlockingInterface, ClientService.BlockingInterface, PriorityFunction,
ConfigurationObserver {
- protected static final Log LOG = LogFactory.getLog(RSRpcServices.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(RSRpcServices.class);
/** RPC scheduler to use for the region server. */
public static final String REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS =
@@ -1485,7 +1484,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
|| (e.getMessage() != null && e.getMessage().contains(
"java.lang.OutOfMemoryError"))) {
stop = true;
- LOG.fatal("Run out of memory; " + RSRpcServices.class.getSimpleName()
+ LOG.error("Run out of memory; " + RSRpcServices.class.getSimpleName()
+ " will abort itself immediately", e);
}
} finally {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index b8356a7be3..e6a5713b8e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -32,8 +32,6 @@ import java.util.regex.Matcher;
import org.apache.commons.collections4.map.AbstractReferenceMap;
import org.apache.commons.collections4.map.ReferenceMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -91,6 +89,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Implements the coprocessor environment and runtime support for coprocessors
@@ -100,7 +100,7 @@ import org.apache.yetus.audience.InterfaceAudience;
public class RegionCoprocessorHost
extends CoprocessorHost {
- private static final Log LOG = LogFactory.getLog(RegionCoprocessorHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionCoprocessorHost.class);
// The shared data map
private static final ReferenceMap> SHARED_DATA_MAP =
new ReferenceMap<>(AbstractReferenceMap.ReferenceStrength.HARD,
@@ -142,6 +142,7 @@ public class RegionCoprocessorHost
return region;
}
+ @Override
public OnlineRegions getOnlineRegions() {
return this.services;
}
@@ -210,6 +211,7 @@ public class RegionCoprocessorHost
* @return An instance of RegionServerServices, an object NOT for general user-space Coprocessor
* consumption.
*/
+ @Override
public RegionServerServices getRegionServerServices() {
return this.rsServices;
}
@@ -553,7 +555,7 @@ public class RegionCoprocessorHost
}
});
} catch (IOException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
}
}
@@ -588,7 +590,7 @@ public class RegionCoprocessorHost
}
});
} catch (IOException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
index 9395b2e1c6..dc1708cfbc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
@@ -21,8 +21,7 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import com.google.protobuf.Service;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SharedConnection;
@@ -41,12 +40,14 @@ import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.security.User;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class RegionServerCoprocessorHost extends
CoprocessorHost {
- private static final Log LOG = LogFactory.getLog(RegionServerCoprocessorHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionServerCoprocessorHost.class);
private RegionServerServices rsServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
index 89847f977a..264d9858de 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
@@ -26,8 +26,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -53,7 +51,8 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
@@ -94,7 +93,7 @@ public class SecureBulkLoadManager {
private static final int RANDOM_WIDTH = 320;
private static final int RANDOM_RADIX = 32;
- private static final Log LOG = LogFactory.getLog(SecureBulkLoadManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SecureBulkLoadManager.class);
private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx");
private final static FsPermission PERM_HIDDEN = FsPermission.valueOf("-rwx--x--x");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java
index c054666395..121cbcae62 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java
@@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
+import java.util.Objects;
import java.util.SortedSet;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.PrivateCellUtil;
@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -343,9 +343,9 @@ public abstract class Segment {
/**
* Dumps all cells of the segment into the given log
*/
- void dump(Log log) {
+ void dump(Logger log) {
for (Cell cell: getCellSet()) {
- log.debug(cell);
+ log.debug(Objects.toString(cell));
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
index fe6d01961e..1b93df936b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
@@ -23,13 +23,13 @@ import java.util.Date;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.NonceKey;
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public class ServerNonceManager {
public static final String HASH_NONCE_GRACE_PERIOD_KEY = "hbase.server.hashNonce.gracePeriod";
- private static final Log LOG = LogFactory.getLog(ServerNonceManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ServerNonceManager.class);
/** The time to wait in an extremely unlikely case of a conflict with a running op.
* Only here so that tests could override it and not wait. */
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
index 58d6327a05..f70267844e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
@@ -23,9 +23,9 @@ import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.util.Threads;
*/
@InterfaceAudience.Private
public class ShutdownHook {
- private static final Log LOG = LogFactory.getLog(ShutdownHook.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ShutdownHook.class);
private static final String CLIENT_FINALIZER_DATA_METHOD = "clientFinalizer";
/**
@@ -211,10 +211,10 @@ public class ShutdownHook {
}
return hdfsClientFinalizer;
} catch (NoSuchFieldException nsfe) {
- LOG.fatal("Couldn't find field 'clientFinalizer' in FileSystem!", nsfe);
+ LOG.error("Couldn't find field 'clientFinalizer' in FileSystem!", nsfe);
throw new RuntimeException("Failed to suppress HDFS shutdown hook");
} catch (IllegalAccessException iae) {
- LOG.fatal("Couldn't access field 'clientFinalizer' in FileSystem!", iae);
+ LOG.error("Couldn't access field 'clientFinalizer' in FileSystem!", iae);
throw new RuntimeException("Failed to suppress HDFS shutdown hook");
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
index 924b8fe4c9..067ad920d5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
@@ -23,9 +23,9 @@ import java.io.InterruptedIOException;
import java.net.ConnectException;
import java.net.SocketTimeoutException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -61,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public class SplitLogWorker implements Runnable {
- private static final Log LOG = LogFactory.getLog(SplitLogWorker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SplitLogWorker.class);
Thread worker;
// thread pool which executes recovery work
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java
index 4b1ae31233..ce5c05119a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.regionserver;
import java.security.PrivilegedAction;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -30,7 +28,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTran
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
@@ -39,7 +38,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
*/
@InterfaceAudience.Private
class SplitRequest implements Runnable {
- private static final Log LOG = LogFactory.getLog(SplitRequest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SplitRequest.class);
private final RegionInfo parent;
private final byte[] midKey;
private final HRegionServer server;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
index 5b83acee4f..a1fe2d1e22 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
@@ -25,14 +25,14 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.io.HalfStoreFileReader;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
*/
@InterfaceAudience.Private
public class StoreFileInfo {
- private static final Log LOG = LogFactory.getLog(StoreFileInfo.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StoreFileInfo.class);
/**
* A non-capture group, for hfiles, so that this can be embedded.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
index a9d9292c18..924e285700 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
@@ -28,8 +28,6 @@ import java.util.Optional;
import java.util.SortedSet;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -53,7 +51,8 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -62,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.PHOENIX)
@InterfaceStability.Evolving
public class StoreFileReader {
- private static final Log LOG = LogFactory.getLog(StoreFileReader.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(StoreFileReader.class.getName());
protected BloomFilter generalBloomFilter = null;
protected BloomFilter deleteFamilyBloomFilter = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
index 26977e4060..ecc812e14d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
@@ -30,8 +30,6 @@ import java.net.InetSocketAddress;
import java.util.UUID;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -52,7 +50,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.RowBloomContext;
import org.apache.hadoop.hbase.util.RowColBloomContext;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/**
@@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
*/
@InterfaceAudience.Private
public class StoreFileWriter implements CellSink, ShipperListener {
- private static final Log LOG = LogFactory.getLog(StoreFileWriter.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(StoreFileWriter.class.getName());
private static final Pattern dash = Pattern.compile("-");
private final BloomFilterWriter generalBloomFilterWriter;
private final BloomFilterWriter deleteFamilyBloomFilterWriter;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 6abca134d7..7e006a3df0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -27,8 +27,6 @@ import java.util.OptionalInt;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
@@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public class StoreScanner extends NonReversedNonLazyKeyValueScanner
implements KeyValueScanner, InternalScanner, ChangedReadersObserver {
- private static final Log LOG = LogFactory.getLog(StoreScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StoreScanner.class);
// In unit tests, the store could be null
protected final HStore store;
private final CellComparator comparator;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java
index 2ada5a99f9..a32a49302b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java
@@ -24,12 +24,12 @@ import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility functions for region server storage layer.
@@ -37,7 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class StoreUtils {
- private static final Log LOG = LogFactory.getLog(StoreUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StoreUtils.class);
/**
* Creates a deterministic hash code for store file collection.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java
index 576aea1f8f..18f7e185ee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java
@@ -23,11 +23,11 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.util.StringUtils;
@@ -44,7 +44,7 @@ import org.apache.hadoop.util.StringUtils;
@InterfaceAudience.Private
public class StorefileRefresherChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(StorefileRefresherChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StorefileRefresherChore.class);
/**
* The period (in milliseconds) for refreshing the store files for the secondary regions.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java
index 732fc061e8..fc0598d89a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java
@@ -24,13 +24,13 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public abstract class StripeMultiFileWriter extends AbstractMultiFileWriter {
- private static final Log LOG = LogFactory.getLog(StripeMultiFileWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeMultiFileWriter.class);
protected final CellComparator comparator;
protected List existingWriters;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java
index eb2a9b6d96..61deb0b93c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java
@@ -18,9 +18,9 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
*/
@InterfaceAudience.Private
public class StripeStoreConfig {
- private static final Log LOG = LogFactory.getLog(StripeStoreConfig.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeStoreConfig.class);
/** The maximum number of files to compact within a stripe; same as for regular compaction. */
public static final String MAX_FILES_KEY = "hbase.store.stripe.compaction.maxFiles";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
index 8c2636355c..03874e1d4c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
@@ -21,14 +21,14 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor;
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class StripeStoreEngine extends StoreEngine {
- private static final Log LOG = LogFactory.getLog(StripeStoreEngine.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeStoreEngine.class);
private StripeStoreConfig config;
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
index 737e1a6df5..6a5e84c74a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
@@ -31,8 +31,6 @@ import java.util.Map;
import java.util.Optional;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
@@ -44,7 +42,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ConcatenatedLists;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
@@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
@InterfaceAudience.Private
public class StripeStoreFileManager
implements StoreFileManager, StripeCompactionPolicy.StripeInformationProvider {
- private static final Log LOG = LogFactory.getLog(StripeStoreFileManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeStoreFileManager.class);
/**
* The file metadata fields that contain the stripe information.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
index a227979e57..d2333451e3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
@@ -24,8 +24,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
@@ -33,7 +31,8 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class StripeStoreFlusher extends StoreFlusher {
- private static final Log LOG = LogFactory.getLog(StripeStoreFlusher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeStoreFlusher.class);
private final Object flushLock = new Object();
private final StripeCompactionPolicy policy;
private final StripeCompactionPolicy.StripeInformationProvider stripes;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java
index c5ef127866..a8ffc2e4ea 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.regionserver.compactions;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.regionserver.StoreScanner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Base class for implementing a Compactor which will generate multiple output files after
@@ -40,7 +40,7 @@ import org.apache.yetus.audience.InterfaceAudience;
public abstract class AbstractMultiOutputCompactor
extends Compactor {
- private static final Log LOG = LogFactory.getLog(AbstractMultiOutputCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractMultiOutputCompactor.class);
public AbstractMultiOutputCompactor(Configuration conf, HStore store) {
super(conf, store);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
index b8194eb55b..d2a86c1c72 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
@@ -19,11 +19,11 @@
package org.apache.hadoop.hbase.regionserver.compactions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
/**
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
@InterfaceAudience.Private
public class CompactionConfiguration {
- private static final Log LOG = LogFactory.getLog(CompactionConfiguration.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactionConfiguration.class);
public static final String HBASE_HSTORE_COMPACTION_RATIO_KEY = "hbase.hstore.compaction.ratio";
public static final String HBASE_HSTORE_COMPACTION_RATIO_OFFPEAK_KEY =
@@ -142,7 +142,7 @@ public class CompactionConfiguration {
this.dateTieredCompactionWindowFactory = conf.get(
DATE_TIERED_COMPACTION_WINDOW_FACTORY_CLASS_KEY,
DEFAULT_DATE_TIERED_COMPACTION_WINDOW_FACTORY_CLASS.getName());
- LOG.info(this);
+ LOG.info(toString());
}
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index 014d4d1daf..9703c3b528 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
@@ -29,8 +29,6 @@ import java.util.Collection;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -62,7 +60,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
/**
@@ -71,7 +70,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
*/
@InterfaceAudience.Private
public abstract class Compactor {
- private static final Log LOG = LogFactory.getLog(Compactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Compactor.class);
protected static final long COMPACTION_PROGRESS_LOG_INTERVAL = 60 * 1000;
protected volatile CompactionProgress progress;
protected final Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
index a0c3e309c9..a6f1b9eb4b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
@@ -25,8 +25,6 @@ import java.util.Collections;
import java.util.List;
import java.util.OptionalLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
@@ -38,7 +36,8 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterators;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.math.LongMath;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class DateTieredCompactionPolicy extends SortedCompactionPolicy {
- private static final Log LOG = LogFactory.getLog(DateTieredCompactionPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DateTieredCompactionPolicy.class);
private final RatioBasedCompactionPolicy compactionPolicyPerWindow;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java
index 09dda90d90..21eaa941cd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java
@@ -21,8 +21,6 @@ import java.io.IOException;
import java.util.List;
import java.util.OptionalLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.DateTieredMultiFileWriter;
@@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.security.User;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This compactor will generate StoreFile for different time ranges.
@@ -39,7 +39,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class DateTieredCompactor extends AbstractMultiOutputCompactor {
- private static final Log LOG = LogFactory.getLog(DateTieredCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DateTieredCompactor.class);
public DateTieredCompactor(Configuration conf, HStore store) {
super(conf, store);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java
index 14539b0e2e..41b819b5d5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java
@@ -21,8 +21,6 @@ import java.io.IOException;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.HStore;
@@ -33,7 +31,8 @@ import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController
import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.security.User;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
@InterfaceAudience.Private
public class DefaultCompactor extends Compactor {
- private static final Log LOG = LogFactory.getLog(DefaultCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultCompactor.class);
public DefaultCompactor(Configuration conf, HStore store) {
super(conf, store);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java
index b0942f6089..d9d10d98ae 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java
@@ -23,12 +23,12 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Class to pick which files if any to compact together.
@@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class ExploringCompactionPolicy extends RatioBasedCompactionPolicy {
- private static final Log LOG = LogFactory.getLog(ExploringCompactionPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExploringCompactionPolicy.class);
/**
* Constructor for ExploringCompactionPolicy.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java
index 67c7a24aab..7cd1ebfaf1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java
@@ -18,12 +18,11 @@
package org.apache.hadoop.hbase.regionserver.compactions;
import org.apache.hadoop.hbase.shaded.com.google.common.math.LongMath;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Exponential compaction window implementation.
@@ -31,7 +30,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class ExponentialCompactionWindowFactory extends CompactionWindowFactory {
- private static final Log LOG = LogFactory.getLog(ExponentialCompactionWindowFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExponentialCompactionWindowFactory.class);
public static final String BASE_WINDOW_MILLIS_KEY =
"hbase.hstore.compaction.date.tiered.base.window.millis";
@@ -128,7 +127,7 @@ public class ExponentialCompactionWindowFactory extends CompactionWindowFactory
windowsPerTier = conf.getInt(WINDOWS_PER_TIER_KEY, 4);
maxTierAgeMillis = conf.getLong(MAX_TIER_AGE_MILLIS_KEY,
comConf.getDateTieredMaxStoreFileAgeMillis());
- LOG.info(this);
+ LOG.info(toString());
}
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java
index 032a9c614d..32b40e1b05 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java
@@ -23,14 +23,14 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
*
@@ -47,7 +47,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class FIFOCompactionPolicy extends ExploringCompactionPolicy {
- private static final Log LOG = LogFactory.getLog(FIFOCompactionPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FIFOCompactionPolicy.class);
public FIFOCompactionPolicy(Configuration conf, StoreConfigInformation storeConfigInfo) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java
index 3cb8843952..b920de2b57 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java
@@ -17,14 +17,14 @@
*/
package org.apache.hadoop.hbase.regionserver.compactions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@InterfaceAudience.Private
public abstract class OffPeakHours {
- private static final Log LOG = LogFactory.getLog(OffPeakHours.class);
+ private static final Logger LOG = LoggerFactory.getLogger(OffPeakHours.class);
public static final OffPeakHours DISABLED = new OffPeakHours() {
@Override public boolean isOffPeakHour() { return false; }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
index dba0473419..a6ea9b22f4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
@@ -24,8 +24,6 @@ import java.util.Collection;
import java.util.List;
import java.util.OptionalLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.regionserver.HStore;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The default algorithm for selecting files for compaction.
@@ -43,7 +43,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class RatioBasedCompactionPolicy extends SortedCompactionPolicy {
- private static final Log LOG = LogFactory.getLog(RatioBasedCompactionPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RatioBasedCompactionPolicy.class);
public RatioBasedCompactionPolicy(Configuration conf,
StoreConfigInformation storeConfigInfo) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java
index f284489eaa..d9b3dd45fd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java
@@ -17,14 +17,13 @@ import java.util.List;
import java.util.OptionalInt;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Collections2;
@@ -36,7 +35,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@InterfaceAudience.Private
public abstract class SortedCompactionPolicy extends CompactionPolicy {
- private static final Log LOG = LogFactory.getLog(SortedCompactionPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SortedCompactionPolicy.class);
public SortedCompactionPolicy(Configuration conf, StoreConfigInformation storeConfigInfo) {
super(conf, storeConfigInfo);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
index b6de6783bc..053920dd1c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
@@ -25,8 +25,6 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
@@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.util.ConcatenatedLists;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
/**
@@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
*/
@InterfaceAudience.Private
public class StripeCompactionPolicy extends CompactionPolicy {
- private final static Log LOG = LogFactory.getLog(StripeCompactionPolicy.class);
+ private final static Logger LOG = LoggerFactory.getLogger(StripeCompactionPolicy.class);
// Policy used to compact individual stripes.
private ExploringCompactionPolicy stripePolicy = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
index c9e591ea43..41e0a71b49 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.regionserver.compactions;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.HStore;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This is the placeholder for stripe compactor. The implementation, as well as the proper javadoc,
@@ -41,7 +41,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class StripeCompactor extends AbstractMultiOutputCompactor {
- private static final Log LOG = LogFactory.getLog(StripeCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeCompactor.class);
public StripeCompactor(Configuration conf, HStore store) {
super(conf, store);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java
index f48ee9260e..7583b726af 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.regionserver.handler;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
@@ -32,7 +30,8 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
/**
@@ -45,7 +44,7 @@ public class CloseRegionHandler extends EventHandler {
// after the user regions have closed. What
// about the case where master tells us to shutdown a catalog region and we
// have a running queue of user regions to close?
- private static final Log LOG = LogFactory.getLog(CloseRegionHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CloseRegionHandler.class);
private final RegionServerServices rsServices;
private final RegionInfo regionInfo;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java
index e4b3ed2d09..f408629534 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.regionserver.handler;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployC
import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
/**
* Handles opening of a region on a region server.
@@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
*/
@InterfaceAudience.Private
public class OpenRegionHandler extends EventHandler {
- private static final Log LOG = LogFactory.getLog(OpenRegionHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(OpenRegionHandler.class);
protected final RegionServerServices rsServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java
index 9f66be9ba6..ed1b2c760f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.regionserver.handler;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.executor.EventHandler;
import org.apache.hadoop.hbase.executor.EventType;
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
*/
@InterfaceAudience.Private
public class ParallelSeekHandler extends EventHandler {
- private static final Log LOG = LogFactory.getLog(ParallelSeekHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ParallelSeekHandler.class);
private KeyValueScanner scanner;
private Cell keyValue;
private long readPoint;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java
index 0d13aafe08..b917379930 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java
@@ -21,13 +21,13 @@ package org.apache.hadoop.hbase.regionserver.handler;
import java.io.IOException;
import java.io.InterruptedIOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.FlushRegionCallable;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
@@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
@InterfaceAudience.Private
public class RegionReplicaFlushHandler extends EventHandler {
- private static final Log LOG = LogFactory.getLog(RegionReplicaFlushHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionReplicaFlushHandler.class);
private final ClusterConnection connection;
private final RpcRetryingCallerFactory rpcRetryingCallerFactory;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java
index 07e7de0819..49ab574ec5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.regionserver.handler;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SplitLogCounters;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.CancelableProgressable;
*/
@InterfaceAudience.Private
public class WALSplitterHandler extends EventHandler {
- private static final Log LOG = LogFactory.getLog(WALSplitterHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALSplitterHandler.class);
private final ServerName serverName;
private final CancelableProgressable reporter;
private final AtomicInteger inProgressTasks;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
index 13ab8c86e4..a20a001e27 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
@@ -21,10 +21,10 @@ import java.io.IOException;
import java.util.List;
import java.util.concurrent.Callable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class FlushSnapshotSubprocedure extends Subprocedure {
- private static final Log LOG = LogFactory.getLog(FlushSnapshotSubprocedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushSnapshotSubprocedure.class);
private final List regions;
private final SnapshotDescription snapshot;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
index 6a7d83ebe4..08335ab4f1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
@@ -30,8 +30,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.DaemonThreadFactory;
@@ -58,6 +56,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This manager class handles the work dealing with snapshots for a {@link HRegionServer}.
@@ -75,7 +75,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
@InterfaceStability.Unstable
public class RegionServerSnapshotManager extends RegionServerProcedureManager {
- private static final Log LOG = LogFactory.getLog(RegionServerSnapshotManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionServerSnapshotManager.class);
/** Maximum number of snapshot region tasks that can run concurrently */
private static final String CONCURENT_SNAPSHOT_TASKS_KEY = "hbase.snapshot.region.concurrentTasks";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java
index 55def0792b..e64390a1f2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java
@@ -17,18 +17,18 @@
*/
package org.apache.hadoop.hbase.regionserver.throttle;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.util.ReflectionUtils;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public final class CompactionThroughputControllerFactory {
- private static final Log LOG = LogFactory.getLog(CompactionThroughputControllerFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactionThroughputControllerFactory.class);
public static final String HBASE_THROUGHPUT_CONTROLLER_KEY =
"hbase.regionserver.throughput.controller";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java
index 6311952bde..fc75c58358 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java
@@ -17,18 +17,18 @@
*/
package org.apache.hadoop.hbase.regionserver.throttle;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.util.ReflectionUtils;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public final class FlushThroughputControllerFactory {
- private static final Log LOG = LogFactory.getLog(FlushThroughputControllerFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushThroughputControllerFactory.class);
public static final String HBASE_FLUSH_THROUGHPUT_CONTROLLER_KEY =
"hbase.regionserver.flush.throughput.controller";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java
index c56b47409f..b3c7bf37a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java
@@ -17,12 +17,12 @@
*/
package org.apache.hadoop.hbase.regionserver.throttle;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
@@ -42,8 +42,8 @@ import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class PressureAwareCompactionThroughputController extends PressureAwareThroughputController {
- private final static Log LOG = LogFactory
- .getLog(PressureAwareCompactionThroughputController.class);
+ private final static Logger LOG = LoggerFactory
+ .getLogger(PressureAwareCompactionThroughputController.class);
public static final String HBASE_HSTORE_COMPACTION_MAX_THROUGHPUT_HIGHER_BOUND =
"hbase.hstore.compaction.throughput.higher.bound";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java
index bdfa99d11f..d0e9a9bf1a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java
@@ -17,12 +17,12 @@
*/
package org.apache.hadoop.hbase.regionserver.throttle;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class PressureAwareFlushThroughputController extends PressureAwareThroughputController {
- private static final Log LOG = LogFactory.getLog(PressureAwareFlushThroughputController.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PressureAwareFlushThroughputController.class);
public static final String HBASE_HSTORE_FLUSH_MAX_THROUGHPUT_UPPER_BOUND =
"hbase.hstore.flush.throughput.upper.bound";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java
index 78413361b4..12ecfcafe4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.regionserver.throttle;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public abstract class PressureAwareThroughputController extends Configured implements
ThroughputController, Stoppable {
- private static final Log LOG = LogFactory.getLog(PressureAwareThroughputController.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PressureAwareThroughputController.class);
/**
* Stores the information of one controlled compaction.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
index 992903656b..0f01595873 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
@@ -48,8 +48,6 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.lang3.mutable.MutableLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -83,7 +81,8 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -115,7 +114,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public abstract class AbstractFSWAL implements WAL {
- private static final Log LOG = LogFactory.getLog(AbstractFSWAL.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractFSWAL.class);
protected static final int DEFAULT_SLOW_SYNC_TIME_MS = 100; // in ms
@@ -1117,8 +1116,8 @@ public abstract class AbstractFSWAL implements WAL {
if (args[0].compareTo("--dump") == 0) {
WALPrettyPrinter.run(Arrays.copyOfRange(args, 1, args.length));
} else if (args[0].compareTo("--perf") == 0) {
- LOG.fatal("Please use the WALPerformanceEvaluation tool instead. i.e.:");
- LOG.fatal(
+ LOG.error("Please use the WALPerformanceEvaluation tool instead. i.e.:");
+ LOG.error(
"\thbase org.apache.hadoop.hbase.wal.WALPerformanceEvaluation --iterations " + args[1]);
System.exit(-1);
} else if (args[0].compareTo("--split") == 0) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java
index 256ced64bd..befc5509fd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java
@@ -28,14 +28,14 @@ import java.util.concurrent.atomic.AtomicLong;
import javax.crypto.spec.SecretKeySpec;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Encryption;
@@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public abstract class AbstractProtobufLogWriter {
- private static final Log LOG = LogFactory.getLog(AbstractProtobufLogWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractProtobufLogWriter.class);
protected CompressionContext compressionContext;
protected Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
index f8355e0e87..e78b1db632 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
@@ -45,8 +45,6 @@ import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -64,7 +62,8 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop;
@@ -131,7 +130,7 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.SingleThreadEvent
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class AsyncFSWAL extends AbstractFSWAL {
- private static final Log LOG = LogFactory.getLog(AsyncFSWAL.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncFSWAL.class);
private static final Comparator SEQ_COMPARATOR = (o1, o2) -> {
int c = Long.compare(o1.getTxid(), o2.getTxid());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java
index 454928bb84..aa585e338c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java
@@ -25,8 +25,6 @@ import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.function.Consumer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -38,7 +36,8 @@ import org.apache.hadoop.hbase.wal.AsyncFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
@@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer;
public class AsyncProtobufLogWriter extends AbstractProtobufLogWriter
implements AsyncFSWALProvider.AsyncWriter {
- private static final Log LOG = LogFactory.getLog(AsyncProtobufLogWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncProtobufLogWriter.class);
private final EventLoopGroup eventLoopGroup;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index fd9d6c122a..c0f454e0b9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -35,8 +35,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -58,7 +56,8 @@ import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -101,7 +100,7 @@ public class FSHLog extends AbstractFSWAL {
// syncs and appends have completed -- so the log roller can swap the WAL out under it.
//
// We use ring buffer sequence as txid of FSWALEntry and SyncFuture.
- private static final Log LOG = LogFactory.getLog(FSHLog.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSHLog.class);
/**
* The nexus at which all incoming handlers meet. Does appends and sync with an ordering. Appends
@@ -162,13 +161,13 @@ public class FSHLog extends AbstractFSWAL {
@Override
public void handleOnStartException(Throwable ex) {
- LOG.error(ex);
+ LOG.error(ex.toString(), ex);
throw new RuntimeException(ex);
}
@Override
public void handleOnShutdownException(Throwable ex) {
- LOG.error(ex);
+ LOG.error(ex.toString(), ex);
throw new RuntimeException(ex);
}
}
@@ -634,6 +633,7 @@ public class FSHLog extends AbstractFSWAL {
/**
* @return true if number of replicas for the WAL is lower than threshold
*/
+ @Override
protected boolean doCheckLogLowReplication() {
boolean logRollNeeded = false;
// if the number of replicas in HDFS has fallen below the configured
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
index ba9c0e6a69..b19f93b333 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
@@ -23,9 +23,9 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
@@ -37,7 +37,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Private
public class MetricsWAL implements WALActionsListener {
- private static final Log LOG = LogFactory.getLog(MetricsWAL.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsWAL.class);
private final MetricsWALSource source;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index c199484271..6017a182f2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -26,9 +26,9 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
@@ -61,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX,
HBaseInterfaceAudience.CONFIG})
public class ProtobufLogReader extends ReaderBase {
- private static final Log LOG = LogFactory.getLog(ProtobufLogReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProtobufLogReader.class);
// public for WALFactory until we move everything to o.a.h.h.wal
@InterfaceAudience.Private
public static final byte[] PB_WAL_MAGIC = Bytes.toBytes("PWAL");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
index 7a135c9fe2..aeb2c19c25 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
@@ -21,15 +21,13 @@ package org.apache.hadoop.hbase.regionserver.wal;
import java.io.IOException;
import java.io.OutputStream;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer;
import org.apache.hadoop.hbase.util.CommonFSUtils;
@@ -44,14 +42,14 @@ import org.apache.hadoop.hbase.wal.WAL.Entry;
public class ProtobufLogWriter extends AbstractProtobufLogWriter
implements FSHLogProvider.Writer {
- private static final Log LOG = LogFactory.getLog(ProtobufLogWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProtobufLogWriter.class);
protected FSDataOutputStream output;
@Override
public void append(Entry entry) throws IOException {
entry.setCompressionContext(compressionContext);
- ((WALKeyImpl)entry.getKey()).getBuilder(compressor).
+ entry.getKey().getBuilder(compressor).
setFollowingKvCount(entry.getEdit().size()).build().writeDelimitedTo(output);
for (Cell cell : entry.getEdit().getCells()) {
// cellEncoder must assume little about the stream, since we write PB and cells in turn.
@@ -68,7 +66,7 @@ public class ProtobufLogWriter extends AbstractProtobufLogWriter
this.output.close();
} catch (NullPointerException npe) {
// Can get a NPE coming up from down in DFSClient$DFSOutputStream#close
- LOG.warn(npe);
+ LOG.warn(npe.toString(), npe);
}
this.output = null;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java
index 9a6bfd3958..f0573587a6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.regionserver.wal;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -34,10 +32,12 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
public abstract class ReaderBase implements AbstractFSWALProvider.Reader {
- private static final Log LOG = LogFactory.getLog(ReaderBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReaderBase.class);
protected Configuration conf;
protected FileSystem fs;
protected Path path;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java
index 2765f94995..b1f17ad4ea 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java
@@ -24,9 +24,9 @@ import java.security.KeyException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.util.EncryptionTest;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class SecureProtobufLogReader extends ProtobufLogReader {
- private static final Log LOG = LogFactory.getLog(SecureProtobufLogReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SecureProtobufLogReader.class);
private Decryptor decryptor = null;
private static List writerClsNames = new ArrayList<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java
index 61586548ab..f21c1f0631 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java
@@ -30,11 +30,11 @@ import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ImmutableByteArray;
@@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.util.ImmutableByteArray;
@InterfaceAudience.Private
class SequenceIdAccounting {
- private static final Log LOG = LogFactory.getLog(SequenceIdAccounting.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SequenceIdAccounting.class);
/**
* This lock ties all operations on {@link SequenceIdAccounting#flushingSequenceIds} and
* {@link #lowestUnflushedSequenceIds} Maps. {@link #lowestUnflushedSequenceIds} has the
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java
index 0edd5d4e19..7b6182e6b6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java
@@ -22,8 +22,6 @@ package org.apache.hadoop.hbase.regionserver.wal;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Implements the coprocessor environment and runtime support for coprocessors
@@ -46,7 +46,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class WALCoprocessorHost
extends CoprocessorHost {
- private static final Log LOG = LogFactory.getLog(WALCoprocessorHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALCoprocessorHost.class);
/**
* Encapsulation of the environment of each coprocessor
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java
index 518ee8fbe5..c3b67faf95 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java
@@ -22,15 +22,14 @@ package org.apache.hadoop.hbase.regionserver.wal;
import java.io.IOException;
import java.util.NavigableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -44,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe
*/
@InterfaceAudience.Private
public class WALUtil {
- private static final Log LOG = LogFactory.getLog(WALUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALUtil.class);
private WALUtil() {
// Shut down construction of this class.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java
index a7637b1462..c390d0967d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.replication;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AbstractService;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Abstract
public abstract class BaseReplicationEndpoint extends AbstractService
implements ReplicationEndpoint {
- private static final Log LOG = LogFactory.getLog(BaseReplicationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BaseReplicationEndpoint.class);
public static final String REPLICATION_WALENTRYFILTER_CONFIG_KEY
= "hbase.replication.source.custom.walentryfilters";
protected Context ctx;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
index 5f465ce6c3..d5506b17d8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
@@ -22,21 +22,20 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.wal.WALEdit;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
public class BulkLoadCellFilter {
- private static final Log LOG = LogFactory.getLog(BulkLoadCellFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BulkLoadCellFilter.class);
private final ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java
index 4985b82ae8..bd5c529092 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java
@@ -24,8 +24,6 @@ import java.util.Collections;
import java.util.List;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Abortable;
@@ -37,6 +35,8 @@ import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.AuthFailedException;
import org.apache.zookeeper.KeeperException.ConnectionLossException;
import org.apache.zookeeper.KeeperException.SessionExpiredException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A {@link BaseReplicationEndpoint} for replication endpoints whose
@@ -48,7 +48,7 @@ import org.apache.zookeeper.KeeperException.SessionExpiredException;
public abstract class HBaseReplicationEndpoint extends BaseReplicationEndpoint
implements Abortable {
- private static final Log LOG = LogFactory.getLog(HBaseReplicationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseReplicationEndpoint.class);
private ZKWatcher zkw = null; // FindBugs: MT_CORRECTNESS
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java
index 9a4cc6c379..d898a421b7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java
@@ -22,12 +22,12 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL.Entry;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate;
@InterfaceAudience.Private
public class NamespaceTableCfWALEntryFilter implements WALEntryFilter, WALCellFilter {
- private static final Log LOG = LogFactory.getLog(NamespaceTableCfWALEntryFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NamespaceTableCfWALEntryFilter.class);
private final ReplicationPeer peer;
private BulkLoadCellFilter bulkLoadFilter = new BulkLoadCellFilter();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java
index 5972734f6d..4e9d67a036 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java
@@ -21,8 +21,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hbase.Abortable;
@@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.replication.ReplicationQueuesClient;
import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Implementation of a file cleaner that checks if a hfile is still scheduled for replication before
@@ -43,7 +43,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class ReplicationHFileCleaner extends BaseHFileCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(ReplicationHFileCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationHFileCleaner.class);
private ZKWatcher zkw;
private ReplicationQueuesClient rqc;
private boolean stopped = false;
@@ -192,9 +192,7 @@ public class ReplicationHFileCleaner extends BaseHFileCleanerDelegate {
@Override
public void abort(String why, Throwable e) {
LOG.warn("ReplicationHFileCleaner received abort, ignoring. Reason: " + why);
- if (LOG.isDebugEnabled()) {
- LOG.debug(e);
- }
+ LOG.debug(e.toString(), e);
}
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
index 57ed8427ac..87b42b36bd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.replication.master;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Implementation of a log cleaner that checks if a log is still scheduled for
@@ -47,7 +47,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class ReplicationLogCleaner extends BaseLogCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(ReplicationLogCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationLogCleaner.class);
private ZKWatcher zkw;
private ReplicationQueuesClient replicationQueues;
private boolean stopped = false;
@@ -140,9 +140,7 @@ public class ReplicationLogCleaner extends BaseLogCleanerDelegate {
@Override
public void abort(String why, Throwable e) {
LOG.warn("ReplicationLogCleaner received abort, ignoring. Reason: " + why);
- if (LOG.isDebugEnabled()) {
- LOG.debug(e);
- }
+ LOG.debug(e.toString(), e);
}
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java
index 5c8fba3970..ea5509f64e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.replication.master;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -38,7 +36,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
/**
@@ -49,7 +48,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
@InterfaceStability.Unstable
public class ReplicationPeerConfigUpgrader extends ReplicationStateZKBase {
- private static final Log LOG = LogFactory.getLog(ReplicationPeerConfigUpgrader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeerConfigUpgrader.class);
public ReplicationPeerConfigUpgrader(ZKWatcher zookeeper,
Configuration conf, Abortable abortable) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java
index 8cfc3ce9b7..2ff5ed89fc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java
@@ -16,14 +16,14 @@ import java.net.URL;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This will load all the xml configuration files for the source cluster replication ID from
@@ -31,7 +31,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class DefaultSourceFSConfigurationProvider implements SourceFSConfigurationProvider {
- private static final Log LOG = LogFactory.getLog(DefaultSourceFSConfigurationProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultSourceFSConfigurationProvider.class);
// Map containing all the source clusters configurations against their replication cluster id
private Map sourceClustersConfs = new HashMap<>();
private static final String XML = ".xml";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
index ff5e5c7e12..93b86494de 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
@@ -28,8 +28,6 @@ import java.util.Queue;
import java.util.Set;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
@@ -59,6 +57,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AtomicLongMap;
/**
@@ -71,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AtomicLo
public class DumpReplicationQueues extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(DumpReplicationQueues.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(DumpReplicationQueues.class);
private List deadRegionServers;
private List deletedQueues;
@@ -417,7 +417,7 @@ public class DumpReplicationQueues extends Configured implements Tool {
public void abort(String why, Throwable e) {
LOG.warn("DumpReplicationQueue received abort, ignoring. Reason: " + why);
if (LOG.isDebugEnabled()) {
- LOG.debug(e);
+ LOG.debug(e.toString(), e);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
index c1ed64413d..5a69dbdf92 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
@@ -40,8 +40,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Abortable;
@@ -50,6 +48,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -78,7 +78,7 @@ import org.apache.hadoop.ipc.RemoteException;
@InterfaceAudience.Private
public class HBaseInterClusterReplicationEndpoint extends HBaseReplicationEndpoint {
- private static final Log LOG = LogFactory.getLog(HBaseInterClusterReplicationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseInterClusterReplicationEndpoint.class);
private static final long DEFAULT_MAX_TERMINATION_WAIT_MULTIPLIER = 2;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
index eb29ac4dc9..a2cd03e7e8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
@@ -31,8 +31,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
@@ -70,7 +70,7 @@ public class HFileReplicator {
"hbase.replication.bulkload.copy.hfiles.perthread";
public static final int REPLICATION_BULKLOAD_COPY_HFILES_PERTHREAD_DEFAULT = 10;
- private static final Log LOG = LogFactory.getLog(HFileReplicator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileReplicator.class);
private static final String UNDERSCORE = "_";
private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
index 09ddbd40bd..9ca1c84431 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.replication.regionserver;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.metrics.BaseSource;
@@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.REPLICATION)
public class MetricsSource implements BaseSource {
- private static final Log LOG = LogFactory.getLog(MetricsSource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsSource.class);
// tracks last shipped timestamp for each wal group
private Map lastTimeStamps = new HashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
index cabf85a7f8..bd191e3397 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
@@ -23,8 +23,6 @@ import java.util.List;
import java.util.UUID;
import java.util.concurrent.PriorityBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -32,6 +30,8 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.replication.ReplicationPeers;
import org.apache.hadoop.hbase.replication.ReplicationQueues;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
@InterfaceAudience.Private
public class RecoveredReplicationSource extends ReplicationSource {
- private static final Log LOG = LogFactory.getLog(RecoveredReplicationSource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RecoveredReplicationSource.class);
private String actualPeerId;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java
index af84868d61..41946393bf 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java
@@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.replication.regionserver;
import java.io.IOException;
import java.util.concurrent.PriorityBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.hadoop.hbase.replication.ReplicationQueues;
import org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceWALReader.WALEntryBatch;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.util.Threads;
@InterfaceAudience.Private
public class RecoveredReplicationSourceShipper extends ReplicationSourceShipper {
- private static final Log LOG = LogFactory.getLog(RecoveredReplicationSourceShipper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RecoveredReplicationSourceShipper.class);
protected final RecoveredReplicationSource source;
private final ReplicationQueues replicationQueues;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java
index edd1b2ad68..7d2a594464 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java
@@ -20,13 +20,13 @@ package org.apache.hadoop.hbase.replication.regionserver;
import java.util.concurrent.PriorityBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.replication.WALEntryFilter;
/**
@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.replication.WALEntryFilter;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class RecoveredReplicationSourceWALReader extends ReplicationSourceWALReader {
- private static final Log LOG = LogFactory.getLog(RecoveredReplicationSourceWALReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RecoveredReplicationSourceWALReader.class);
public RecoveredReplicationSourceWALReader(FileSystem fs, Configuration conf,
PriorityBlockingQueue logQueue, long startPosition, WALEntryFilter filter,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
index 3d39146cac..b9f2d0df6b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
@@ -32,8 +32,6 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellScanner;
@@ -70,7 +68,8 @@ import org.apache.hadoop.hbase.wal.WALSplitter.RegionEntryBuffer;
import org.apache.hadoop.hbase.wal.WALSplitter.SinkWriter;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.Cache;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
@@ -84,7 +83,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWA
@InterfaceAudience.Private
public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint {
- private static final Log LOG = LogFactory.getLog(RegionReplicaReplicationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionReplicaReplicationEndpoint.class);
// Can be configured differently than hbase.client.retries.number
private static String CLIENT_RETRIES_NUMBER
@@ -161,8 +160,7 @@ public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint {
try {
outputSink.finishWritingAndClose();
} catch (IOException ex) {
- LOG.warn("Got exception while trying to close OutputSink");
- LOG.warn(ex);
+ LOG.warn("Got exception while trying to close OutputSink", ex);
}
}
if (this.pool != null) {
@@ -583,6 +581,7 @@ public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint {
this.initialEncodedRegionName = regionInfo.getEncodedNameAsBytes();
}
+ @Override
public ReplicateWALEntryResponse call(HBaseRpcController controller) throws Exception {
// Check whether we should still replay this entry. If the regions are changed, or the
// entry is not coming form the primary region, filter it out because we do not need it.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
index 2a2df60303..d8212e95ae 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
@@ -29,8 +29,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
@@ -62,7 +60,8 @@ import org.apache.hadoop.hbase.replication.master.ReplicationHFileCleaner;
import org.apache.hadoop.hbase.replication.master.ReplicationLogCleaner;
import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
@@ -71,8 +70,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
@InterfaceAudience.Private
public class Replication implements
ReplicationSourceService, ReplicationSinkService, WALActionsListener {
- private static final Log LOG =
- LogFactory.getLog(Replication.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(Replication.class);
private boolean replicationForBulkLoadData;
private ReplicationSourceManager replicationManager;
private ReplicationQueues replicationQueues;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java
index e72f6e2a09..ec478d5e09 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java
@@ -23,8 +23,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
@@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -48,7 +48,7 @@ import javax.validation.constraints.Null;
@CoreCoprocessor
@InterfaceAudience.Private
public class ReplicationObserver implements RegionCoprocessor, RegionObserver {
- private static final Log LOG = LogFactory.getLog(ReplicationObserver.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationObserver.class);
@Override
public Optional getRegionObserver() {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
index 2f9f9c5c9e..5431b807db 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
@@ -31,8 +31,6 @@ import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -44,6 +42,8 @@ import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
@@ -78,7 +78,7 @@ import org.apache.hadoop.hbase.util.Pair;
@InterfaceAudience.Private
public class ReplicationSink {
- private static final Log LOG = LogFactory.getLog(ReplicationSink.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSink.class);
private final Configuration conf;
// Volatile because of note in here -- look for double-checked locking:
// http://www.oracle.com/technetwork/articles/javase/bloch-effective-08-qa-140880.html
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java
index eb882f3a49..6248dd3355 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java
@@ -27,13 +27,13 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint;
*/
public class ReplicationSinkManager {
- private static final Log LOG = LogFactory.getLog(ReplicationSinkManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSinkManager.class);
/**
* Default maximum number of times a replication sink can be reported as bad before
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
index ea6c6d44af..f4f35ae0c5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
@@ -33,8 +33,6 @@ import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -45,6 +43,8 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RSRpcServices;
import org.apache.hadoop.hbase.replication.ChainWALEntryFilter;
import org.apache.hadoop.hbase.replication.ClusterMarkingEntryFilter;
@@ -78,7 +78,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@InterfaceAudience.Private
public class ReplicationSource extends Thread implements ReplicationSourceInterface {
- private static final Log LOG = LogFactory.getLog(ReplicationSource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSource.class);
// Queues of logs to process, entry in format of walGroupId->queue,
// each presents a queue for one wal group
private Map> queues = new HashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java
index e97da24a56..865a202870 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java
@@ -18,10 +18,10 @@
*/
package org.apache.hadoop.hbase.replication.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
/**
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
@InterfaceAudience.Private
public class ReplicationSourceFactory {
- private static final Log LOG = LogFactory.getLog(ReplicationSourceFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSourceFactory.class);
static ReplicationSourceInterface create(Configuration conf, String peerId) {
ReplicationQueueInfo replicationQueueInfo = new ReplicationQueueInfo(peerId);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index c518ece18f..07c53e1c9e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -40,8 +40,7 @@ import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -67,7 +66,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -89,8 +89,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
*/
@InterfaceAudience.Private
public class ReplicationSourceManager implements ReplicationListener {
- private static final Log LOG =
- LogFactory.getLog(ReplicationSourceManager.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ReplicationSourceManager.class);
// List of all the sources that read this RS's logs
private final List sources;
// List of all the sources we got from died RSs
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java
index 9dfe686d17..1e1dcc8c15 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java
@@ -24,14 +24,14 @@ import java.util.Map;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceWALReader.WALEntryBatch;
@@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
*/
@InterfaceAudience.Private
public class ReplicationSourceShipper extends Thread {
- private static final Log LOG = LogFactory.getLog(ReplicationSourceShipper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSourceShipper.class);
// Hold the state of a replication worker thread
public enum WorkerState {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java
index bbcaaa4d73..1ec797fb14 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java
@@ -29,8 +29,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -45,7 +43,8 @@ import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescript
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ReplicationSourceWALReader extends Thread {
- private static final Log LOG = LogFactory.getLog(ReplicationSourceWALReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSourceWALReader.class);
private final PriorityBlockingQueue logQueue;
private final FileSystem fs;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
index db3b1fc037..21b8ac5c8c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.replication.regionserver;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -39,6 +37,8 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* In a scenario of Replication based Disaster/Recovery, when hbase
@@ -51,7 +51,7 @@ import org.apache.hadoop.util.ToolRunner;
public class ReplicationSyncUp extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(ReplicationSyncUp.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSyncUp.class.getName());
private static Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
index 6277d24c5b..7c83c0c1fa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
@@ -25,8 +25,6 @@ import java.util.NoSuchElementException;
import java.util.OptionalLong;
import java.util.concurrent.PriorityBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.util.FSUtils;
@@ -53,7 +53,7 @@ import org.apache.hadoop.ipc.RemoteException;
@InterfaceAudience.Private
@InterfaceStability.Evolving
class WALEntryStream implements Closeable {
- private static final Log LOG = LogFactory.getLog(WALEntryStream.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALEntryStream.class);
private Reader reader;
private Path currentPath;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
index 7059bd8dc4..b3924350b4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
@@ -34,9 +34,9 @@ import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
@@ -49,7 +49,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
@InterfaceAudience.Private
public class HBaseSaslRpcServer {
- private static final Log LOG = LogFactory.getLog(HBaseSaslRpcServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseSaslRpcServer.class);
private final SaslServer saslServer;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index f5d70f66a4..89a5e17990 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -32,8 +32,6 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
@@ -75,6 +73,8 @@ import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
import org.apache.hadoop.io.WritableUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Maintains lists of permission grants to users and groups to allow for
@@ -119,7 +119,7 @@ public class AccessControlLists {
* _acl_ table info: column keys */
public static final char ACL_KEY_DELIMITER = ',';
- private static final Log LOG = LogFactory.getLog(AccessControlLists.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AccessControlLists.class);
/**
* Stores a new user permission grant in the access control lists table.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 0f9d8a5a8f..0bb61c921f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -39,8 +39,6 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
@@ -139,6 +137,8 @@ import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides basic authorization checks for data access and administrative
@@ -180,10 +180,10 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor,
MasterObserver, RegionObserver, RegionServerObserver, EndpointObserver, BulkLoadObserver {
// TODO: encapsulate observer functions into separate class/sub-class.
- private static final Log LOG = LogFactory.getLog(AccessController.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AccessController.class);
- private static final Log AUDITLOG =
- LogFactory.getLog("SecurityLogger."+AccessController.class.getName());
+ private static final Logger AUDITLOG =
+ LoggerFactory.getLogger("SecurityLogger."+AccessController.class.getName());
private static final String CHECK_COVERING_PERM = "check_covering_perm";
private static final String TAG_CHECK_PASSED = "tag_check_passed";
private static final byte[] TRUE = Bytes.toBytes(true);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
index a1179b1dc3..44a4f57204 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
@@ -24,8 +24,6 @@ import java.util.Optional;
import java.util.regex.Matcher;
import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -39,6 +37,8 @@ import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Master observer for restricting coprocessor assignments.
@@ -49,8 +49,8 @@ public class CoprocessorWhitelistMasterObserver implements MasterCoprocessor, Ma
public static final String CP_COPROCESSOR_WHITELIST_PATHS_KEY =
"hbase.coprocessor.region.whitelist.paths";
- private static final Log LOG = LogFactory
- .getLog(CoprocessorWhitelistMasterObserver.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(CoprocessorWhitelistMasterObserver.class);
@Override
public Optional getMasterObserver() {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
index 2ba4ac5549..13e9c08547 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
@@ -33,8 +33,6 @@ import java.util.Map;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
@@ -47,6 +45,8 @@ import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Performs authorization checks for a given user's assigned permissions
@@ -97,7 +97,7 @@ public class TableAuthManager implements Closeable {
}
}
- private static final Log LOG = LogFactory.getLog(TableAuthManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableAuthManager.class);
/** Cache of global permissions */
private volatile PermissionCache globalCache;
@@ -769,7 +769,7 @@ public class TableAuthManager implements Closeable {
if (refCount.get(instance) == null || refCount.get(instance) < 1) {
String msg = "Something wrong with the TableAuthManager reference counting: " + instance
+ " whose count is " + refCount.get(instance);
- LOG.fatal(msg);
+ LOG.error(msg);
instance.close();
managerMap.remove(instance.getZKPermissionWatcher().getWatcher());
instance.getZKPermissionWatcher().getWatcher().abort(msg, null);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
index d45b5b5730..2437657fbc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.security.access;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DaemonThreadFactory;
import org.apache.hadoop.hbase.TableName;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
@@ -53,7 +53,7 @@ import java.util.concurrent.RejectedExecutionException;
*/
@InterfaceAudience.Private
public class ZKPermissionWatcher extends ZKListener implements Closeable {
- private static final Log LOG = LogFactory.getLog(ZKPermissionWatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKPermissionWatcher.class);
// parent node for permissions lists
static final String ACL_NODE = "acl";
private final TableAuthManager authManager;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
index aa6b1e94b7..de8ea5d3ab 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
@@ -25,8 +25,6 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
@@ -40,6 +38,8 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.Token;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Manages an internal list of secret keys used to sign new authentication
@@ -60,7 +60,7 @@ public class AuthenticationTokenSecretManager
static final String NAME_PREFIX = "SecretManager-";
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
AuthenticationTokenSecretManager.class);
private long lastKeyUpdate;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
index 3bf4df101a..389bcc6be9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.security.token;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.security.token.Token;
@@ -36,7 +36,7 @@ import org.apache.hadoop.security.token.Token;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class FsDelegationToken {
- private static final Log LOG = LogFactory.getLog(FsDelegationToken.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FsDelegationToken.class);
private final UserProvider userProvider;
private final String renewer;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
index e355752013..b137aaa30e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
@@ -24,8 +24,6 @@ import com.google.protobuf.Service;
import java.io.IOException;
import java.util.Collections;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
@@ -43,6 +41,8 @@ import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.Token;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides a service for obtaining authentication tokens via the
@@ -53,7 +53,7 @@ import org.apache.yetus.audience.InterfaceAudience;
public class TokenProvider implements AuthenticationProtos.AuthenticationService.Interface,
RegionCoprocessor {
- private static final Log LOG = LogFactory.getLog(TokenProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TokenProvider.class);
private AuthenticationTokenSecretManager secretManager;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
index 3347e1caa6..5461760137 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
@@ -24,8 +24,7 @@ import java.security.PrivilegedExceptionAction;
import com.google.protobuf.ByteString;
import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
@@ -43,6 +42,8 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.security.token.Token;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility methods for obtaining authentication tokens.
@@ -50,7 +51,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Public
public class TokenUtil {
// This class is referenced indirectly by User out in common; instances are created by reflection
- private static final Log LOG = LogFactory.getLog(TokenUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TokenUtil.class);
/**
* Obtain and return an authentication token for the current user.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java
index 96502fd82a..6b4e9b691d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java
@@ -23,8 +23,6 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
@@ -33,6 +31,8 @@ import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Synchronizes token encryption keys across cluster nodes.
@@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException;
public class ZKSecretWatcher extends ZKListener {
private static final String DEFAULT_ROOT_NODE = "tokenauth";
private static final String DEFAULT_KEYS_PARENT = "keys";
- private static final Log LOG = LogFactory.getLog(ZKSecretWatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKSecretWatcher.class);
private AuthenticationTokenSecretManager secretManager;
private String baseKeyZNode;
@@ -77,7 +77,7 @@ public class ZKSecretWatcher extends ZKListener {
ZKUtil.getChildDataAndWatchForNewChildren(watcher, keysParentZNode);
refreshNodes(nodes);
} catch (KeeperException ke) {
- LOG.fatal("Error reading data from zookeeper", ke);
+ LOG.error("Error reading data from zookeeper", ke);
watcher.abort("Error reading new key znode "+path, ke);
}
}
@@ -110,10 +110,10 @@ public class ZKSecretWatcher extends ZKListener {
new AuthenticationKey());
secretManager.addKey(key);
} catch (KeeperException ke) {
- LOG.fatal("Error reading data from zookeeper", ke);
+ LOG.error("Error reading data from zookeeper", ke);
watcher.abort("Error reading updated key znode "+path, ke);
} catch (IOException ioe) {
- LOG.fatal("Error reading key writables", ioe);
+ LOG.error("Error reading key writables", ioe);
watcher.abort("Error reading key writables from znode "+path, ioe);
}
}
@@ -128,7 +128,7 @@ public class ZKSecretWatcher extends ZKListener {
ZKUtil.getChildDataAndWatchForNewChildren(watcher, keysParentZNode);
refreshNodes(nodes);
} catch (KeeperException ke) {
- LOG.fatal("Error reading data from zookeeper", ke);
+ LOG.error("Error reading data from zookeeper", ke);
watcher.abort("Error reading changed keys from zookeeper", ke);
}
}
@@ -152,7 +152,7 @@ public class ZKSecretWatcher extends ZKListener {
data, new AuthenticationKey());
secretManager.addKey(key);
} catch (IOException ioe) {
- LOG.fatal("Failed reading new secret key for id '" + keyId +
+ LOG.error("Failed reading new secret key for id '" + keyId +
"' from zk", ioe);
watcher.abort("Error deserializing key from znode "+path, ioe);
}
@@ -170,7 +170,7 @@ public class ZKSecretWatcher extends ZKListener {
} catch (KeeperException.NoNodeException nne) {
LOG.error("Non-existent znode "+keyZNode+" for key "+key.getKeyId(), nne);
} catch (KeeperException ke) {
- LOG.fatal("Failed removing znode "+keyZNode+" for key "+key.getKeyId(),
+ LOG.error("Failed removing znode "+keyZNode+" for key "+key.getKeyId(),
ke);
watcher.abort("Unhandled zookeeper error removing znode "+keyZNode+
" for key "+key.getKeyId(), ke);
@@ -184,7 +184,7 @@ public class ZKSecretWatcher extends ZKListener {
// TODO: is there any point in retrying beyond what ZK client does?
ZKUtil.createSetData(watcher, keyZNode, keyData);
} catch (KeeperException ke) {
- LOG.fatal("Unable to synchronize master key "+key.getKeyId()+
+ LOG.error("Unable to synchronize master key "+key.getKeyId()+
" to znode "+keyZNode, ke);
watcher.abort("Unable to synchronize secret key "+
key.getKeyId()+" in zookeeper", ke);
@@ -205,7 +205,7 @@ public class ZKSecretWatcher extends ZKListener {
ZKUtil.createSetData(watcher, keyZNode, keyData);
}
} catch (KeeperException ke) {
-      LOG.fatal("Unable to update master key "+key.getKeyId()+
-          " in znode "+keyZNode);
+      LOG.error("Unable to update master key "+key.getKeyId()+
+          " in znode "+keyZNode, ke);
watcher.abort("Unable to synchronize secret key "+
key.getKeyId()+" in zookeeper", ke);
@@ -224,7 +224,7 @@ public class ZKSecretWatcher extends ZKListener {
ZKUtil.getChildDataAndWatchForNewChildren(watcher, keysParentZNode);
refreshNodes(nodes);
} catch (KeeperException ke) {
- LOG.fatal("Error reading data from zookeeper", ke);
+ LOG.error("Error reading data from zookeeper", ke);
watcher.abort("Error reading changed keys from zookeeper", ke);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
index d1fac75373..624597553f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
@@ -40,8 +40,6 @@ import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.AuthUtil;
@@ -60,6 +58,8 @@ import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
@@ -78,7 +78,7 @@ import org.apache.hadoop.hbase.util.Pair;
@InterfaceAudience.Private
public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService {
- private static final Log LOG = LogFactory.getLog(DefaultVisibilityLabelServiceImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultVisibilityLabelServiceImpl.class);
// "system" label is having an ordinal value 1.
private static final int SYSTEM_LABEL_ORDINAL = 1;
@@ -507,7 +507,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
authLabels = (authLabels == null) ? new ArrayList<>() : authLabels;
authorizations = new Authorizations(authLabels);
} catch (Throwable t) {
- LOG.error(t);
+ LOG.error(t.toString(), t);
throw new IOException(t);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java
index 0b7214fb30..cecdfdb4b5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java
@@ -22,9 +22,9 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.User;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.security.User;
@InterfaceAudience.Private
public class DefinedSetFilterScanLabelGenerator implements ScanLabelGenerator {
- private static final Log LOG = LogFactory.getLog(DefinedSetFilterScanLabelGenerator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefinedSetFilterScanLabelGenerator.class);
private Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java
index a2a9e04115..e2bc16b5f0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java
@@ -22,9 +22,9 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.User;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.security.User;
@InterfaceAudience.Private
public class EnforcingScanLabelGenerator implements ScanLabelGenerator {
- private static final Log LOG = LogFactory.getLog(EnforcingScanLabelGenerator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EnforcingScanLabelGenerator.class);
private Configuration conf;
private VisibilityLabelsCache labelsCache;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java
index cd6ef86c5c..1c77a4d008 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java
@@ -22,9 +22,9 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.User;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.security.User;
@InterfaceAudience.Private
public class FeedUserAuthScanLabelGenerator implements ScanLabelGenerator {
- private static final Log LOG = LogFactory.getLog(FeedUserAuthScanLabelGenerator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FeedUserAuthScanLabelGenerator.class);
private Configuration conf;
private VisibilityLabelsCache labelsCache;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
index c6f81c4c65..b90f10484e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
@@ -38,8 +38,6 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
@@ -114,6 +112,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Coprocessor that has both the MasterObserver and RegionObserver implemented that supports in
@@ -125,8 +125,8 @@ import org.apache.yetus.audience.InterfaceAudience;
public class VisibilityController implements MasterCoprocessor, RegionCoprocessor,
VisibilityLabelsService.Interface, MasterObserver, RegionObserver {
- private static final Log LOG = LogFactory.getLog(VisibilityController.class);
- private static final Log AUDITLOG = LogFactory.getLog("SecurityLogger."
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityController.class);
+ private static final Logger AUDITLOG = LoggerFactory.getLogger("SecurityLogger."
+ VisibilityController.class.getName());
// flags if we are running on a region of the 'labels' table
private boolean labelsRegion = false;
@@ -772,7 +772,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso
LOG.error("User is not having required permissions to add labels", e);
setExceptionResults(visLabels.size(), e, response);
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
setExceptionResults(visLabels.size(), e, response);
}
}
@@ -827,7 +827,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso
LOG.error("User is not having required permissions to set authorization", e);
setExceptionResults(auths.size(), e, response);
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
setExceptionResults(auths.size(), e, response);
}
}
@@ -951,7 +951,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso
LOG.error("User is not having required permissions to clear authorization", e);
setExceptionResults(auths.size(), e, response);
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
setExceptionResults(auths.size(), e, response);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java
index 16eff84310..74531b92ce 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java
@@ -19,9 +19,9 @@ package org.apache.hadoop.hbase.security.visibility;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
@@ -31,7 +31,7 @@ import org.apache.hadoop.util.ReflectionUtils;
@InterfaceAudience.Private
public class VisibilityLabelServiceManager {
- private static final Log LOG = LogFactory.getLog(VisibilityLabelServiceManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityLabelServiceManager.class);
public static final String VISIBILITY_LABEL_SERVICE_CLASS =
"hbase.regionserver.visibility.label.service.class";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
index 85bc0d5173..438b616947 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
@@ -27,8 +27,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.yetus.audience.InterfaceAudience;
@@ -39,6 +37,8 @@ import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.Visibil
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Maintains the cache for visibility labels and also uses the zookeeper to update the labels in the
@@ -48,7 +48,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Private
public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
- private static final Log LOG = LogFactory.getLog(VisibilityLabelsCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityLabelsCache.class);
private static final List EMPTY_LIST = Collections.emptyList();
private static final Set EMPTY_SET = Collections.emptySet();
private static VisibilityLabelsCache instance;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
index dc467d6f91..061d22b435 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
@@ -30,13 +30,13 @@ import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.regionserver.querymatcher.NewVersionBehaviorTracker;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Similar to MvccSensitiveTracker but tracks the visibility expression also before
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.regionserver.querymatcher.NewVersionBehaviorTrack
*/
public class VisibilityNewVersionBehaivorTracker extends NewVersionBehaviorTracker {
- private static final Log LOG = LogFactory.getLog(VisibilityNewVersionBehaivorTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityNewVersionBehaivorTracker.class);
public VisibilityNewVersionBehaivorTracker(NavigableSet columns,
CellComparator cellComparator, int minVersion, int maxVersion, int resultMaxVersions,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java
index 038b79973e..a62a27b056 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java
@@ -24,8 +24,6 @@ import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.wal.WAL.Entry;
@InterfaceAudience.Private
public class VisibilityReplicationEndpoint implements ReplicationEndpoint {
- private static final Log LOG = LogFactory.getLog(VisibilityReplicationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityReplicationEndpoint.class);
private final ReplicationEndpoint delegator;
private final VisibilityLabelService visibilityLabelsService;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
index da0938b22d..6b9ac7449a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
@@ -23,9 +23,9 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.querymatcher.ScanDeleteTracker;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.Triple;
@InterfaceAudience.Private
public class VisibilityScanDeleteTracker extends ScanDeleteTracker {
- private static final Log LOG = LogFactory.getLog(VisibilityScanDeleteTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityScanDeleteTracker.class);
/**
* This tag is used for the DELETE cell which has no visibility label.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
index 3db8d0ebf1..c177c2b09e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
@@ -35,8 +35,6 @@ import java.util.Optional;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
@@ -65,6 +63,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility method to support visibility
@@ -72,7 +72,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class VisibilityUtils {
- private static final Log LOG = LogFactory.getLog(VisibilityUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityUtils.class);
public static final String VISIBILITY_LABEL_GENERATOR_CLASS =
"hbase.regionserver.scan.visibility.label.generator.class";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java
index 5cc244cd69..d428ff4291 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.security.visibility;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
@@ -28,6 +26,8 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A zk watcher that watches the labels table znode. This would create a znode
@@ -36,7 +36,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Private
public class ZKVisibilityLabelWatcher extends ZKListener {
- private static final Log LOG = LogFactory.getLog(ZKVisibilityLabelWatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKVisibilityLabelWatcher.class);
private static final String VISIBILITY_LABEL_ZK_PATH = "zookeeper.znode.visibility.label.parent";
private static final String DEFAULT_VISIBILITY_LABEL_NODE = "visibility/labels";
private static final String VISIBILITY_USER_AUTHS_ZK_PATH =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index e08d547d6b..99690de203 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -34,8 +34,6 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -66,7 +64,8 @@ import org.apache.hadoop.hbase.util.ModifyRegionUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.IOUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
@@ -115,7 +114,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Private
public class RestoreSnapshotHelper {
- private static final Log LOG = LogFactory.getLog(RestoreSnapshotHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RestoreSnapshotHelper.class);
  private final Map<byte[], byte[]> regionsMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
index 61a4a85ed0..8e2605ec27 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
@@ -22,8 +22,6 @@ import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -33,6 +31,8 @@ import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -97,7 +97,7 @@ public final class SnapshotDescriptionUtils {
}
}
- private static final Log LOG = LogFactory.getLog(SnapshotDescriptionUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotDescriptionUtils.class);
/**
* Version of the fs layout for a snapshot. Future snapshots may have different file layouts,
* which we may need to read in differently.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index c76155c6de..7d7e526b6d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -34,8 +34,6 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -49,7 +47,8 @@ import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Public
public final class SnapshotInfo extends AbstractHBaseTool {
- private static final Log LOG = LogFactory.getLog(SnapshotInfo.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotInfo.class);
static final class Options {
static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to examine.");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index 1a7c7f017a..b334585454 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -28,8 +28,6 @@ import java.util.Map;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
@@ -69,7 +68,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Private
public final class SnapshotManifest {
- private static final Log LOG = LogFactory.getLog(SnapshotManifest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotManifest.class);
public static final String SNAPSHOT_MANIFEST_SIZE_LIMIT_CONF_KEY = "snapshot.manifest.size.limit";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
index 61cbbd1172..7dfeab39a4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
@@ -28,8 +28,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorCompletionService;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Private
public final class SnapshotManifestV1 {
- private static final Log LOG = LogFactory.getLog(SnapshotManifestV1.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotManifestV1.class);
public static final int DESCRIPTOR_VERSION = 0;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
index 561eb77638..5b7152aad5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
@@ -27,8 +27,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorCompletionService;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
@@ -58,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Private
public final class SnapshotManifestV2 {
- private static final Log LOG = LogFactory.getLog(SnapshotManifestV2.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotManifestV2.class);
public static final int DESCRIPTOR_VERSION = 2;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
index 0cca62fa33..b157d01e19 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
@@ -29,8 +29,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Private
public final class SnapshotReferenceUtil {
- private static final Log LOG = LogFactory.getLog(SnapshotReferenceUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotReferenceUtil.class);
public interface StoreFileVisitor {
void storeFile(final RegionInfo regionInfo, final String familyName,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
index e942a020e0..969a7579ba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
@@ -49,8 +49,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.time.StopWatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.ChoreService;
@@ -98,7 +96,8 @@ import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.client.ConnectStringParser;
import org.apache.zookeeper.data.Stat;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -540,10 +539,10 @@ public final class Canary implements Tool {
LOG.debug("The targeted table was disabled. Assuming success.");
} catch (DoNotRetryIOException dnrioe) {
sink.publishReadFailure(tableName.getNameAsString(), serverName);
- LOG.error(dnrioe);
+ LOG.error(dnrioe.toString(), dnrioe);
} catch (IOException e) {
sink.publishReadFailure(tableName.getNameAsString(), serverName);
- LOG.error(e);
+ LOG.error(e.toString(), e);
} finally {
if (table != null) {
try {
@@ -571,7 +570,7 @@ public final class Canary implements Tool {
private static final long DEFAULT_TIMEOUT = 600000; // 10 mins
private static final int MAX_THREADS_NUM = 16; // #threads to contact regions
- private static final Log LOG = LogFactory.getLog(Canary.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Canary.class);
public static final TableName DEFAULT_WRITE_TABLE_NAME = TableName.valueOf(
NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "canary");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
index c457e224da..b5eea9cd0c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
@@ -50,8 +50,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import org.apache.commons.lang3.mutable.MutableInt;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
@@ -63,6 +61,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ClientServiceCallable;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
@@ -109,7 +109,7 @@ import org.apache.hadoop.util.ToolRunner;
@InterfaceAudience.Public
public class LoadIncrementalHFiles extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(LoadIncrementalHFiles.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LoadIncrementalHFiles.class);
public static final String NAME = "completebulkload";
static final String RETRY_ON_IO_EXCEPTION = "hbase.bulkload.retries.retryOnIOException";
@@ -328,7 +328,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
if (queue.isEmpty()) {
LOG.warn(
"Bulk load operation did not find any files to load in " + "directory " + hfofDir != null
- ? hfofDir.toUri()
+ ? hfofDir.toUri().toString()
: "" + ". Does it contain files in " +
"subdirectories that correspond to column family names?");
return Collections.emptyMap();
@@ -877,7 +877,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
for (LoadQueueItem q : queue) {
err.append(" ").append(q.getFilePath()).append('\n');
}
- LOG.error(err);
+ LOG.error(err.toString());
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
index 60fd22d852..738ffc28c1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.tool;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -28,6 +26,8 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.OperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Optional;
@@ -61,7 +61,7 @@ import java.util.concurrent.atomic.AtomicLong;
*
*/
public class WriteSinkCoprocessor implements RegionCoprocessor, RegionObserver {
- private static final Log LOG = LogFactory.getLog(WriteSinkCoprocessor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WriteSinkCoprocessor.class);
private final AtomicLong ops = new AtomicLong();
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
index b4851bf105..89ff5b7222 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.util;
import java.io.DataInput;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
@@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.io.hfile.CompoundBloomFilterWriter;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Handles Bloom filter initialization based on configuration and serialized metadata in the reader
@@ -39,8 +39,8 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public final class BloomFilterFactory {
- private static final Log LOG =
- LogFactory.getLog(BloomFilterFactory.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(BloomFilterFactory.class.getName());
/** This class should not be instantiated. */
private BloomFilterFactory() {}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
index dbc7afa744..b6af8a5507 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
@@ -22,11 +22,11 @@ import java.io.IOException;
import java.util.Locale;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -52,7 +52,7 @@ import org.apache.hadoop.io.compress.Compressor;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
@InterfaceStability.Evolving
public class CompressionTest {
- private static final Log LOG = LogFactory.getLog(CompressionTest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompressionTest.class);
public static boolean testCompression(String codec) {
codec = codec.toLowerCase(Locale.ROOT);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
index dadb615498..7b9f021313 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
@@ -23,13 +23,14 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -38,7 +39,6 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.commons.logging.LogFactory;
/**
* A utility to store user specific HConnections in memory.
@@ -48,7 +48,7 @@ import org.apache.commons.logging.LogFactory;
*/
@InterfaceAudience.Private
public class ConnectionCache {
- private static final Log LOG = LogFactory.getLog(ConnectionCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ConnectionCache.class);
  private final Map<String, ConnectionInfo> connections = new ConcurrentHashMap<>();
  private final KeyLocker<String> locker = new KeyLocker<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java
index 2cc4f44e2c..6c6a09d92f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java
@@ -32,11 +32,10 @@ import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/**
@@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class DirectMemoryUtils {
- private static final Log LOG = LogFactory.getLog(DirectMemoryUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DirectMemoryUtils.class);
private static final String MEMORY_USED = "MemoryUsed";
private static final MBeanServer BEAN_SERVER;
private static final ObjectName NIO_DIRECT_POOL;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java
index e6b8c0aa34..2687d3b033 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java
@@ -24,10 +24,10 @@ import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.security.EncryptionUtil;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class EncryptionTest {
- private static final Log LOG = LogFactory.getLog(EncryptionTest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EncryptionTest.class);
  static final Map<String, Boolean> keyProviderResults = new ConcurrentHashMap<>();
  static final Map<String, Boolean> cipherProviderResults = new ConcurrentHashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
index 8e13f4015d..c5b6e8f08a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
@@ -30,10 +30,10 @@ import java.util.Set;
import java.util.Collection;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class FSHDFSUtils extends FSUtils {
- private static final Log LOG = LogFactory.getLog(FSHDFSUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSHDFSUtils.class);
private static Class dfsUtilClazz;
private static Method getNNAddressesMethod;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
index bb7b1f3a29..4207f391db 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
@@ -21,19 +21,19 @@ package org.apache.hadoop.hbase.util;
import java.io.IOException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
/**
* MapR implementation.
*/
@InterfaceAudience.Private
public class FSMapRUtils extends FSUtils {
- private static final Log LOG = LogFactory.getLog(FSMapRUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSMapRUtils.class);
public void recoverFileLease(final FileSystem fs, final Path p,
Configuration conf, CancelableProgressable reporter) throws IOException {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
index 66ac3956d6..f258e6cd93 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
@@ -23,9 +23,10 @@ import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -39,7 +40,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
*/
@InterfaceAudience.Private
class FSRegionScanner implements Runnable {
- static private final Log LOG = LogFactory.getLog(FSRegionScanner.class);
+ static private final Logger LOG = LoggerFactory.getLogger(FSRegionScanner.class);
private Path regionPath;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index e1bc189e3f..5627e9a045 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -30,8 +30,6 @@ import java.util.regex.Pattern;
import edu.umd.cs.findbugs.annotations.Nullable;
import org.apache.commons.lang3.NotImplementedException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -40,6 +38,8 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
@@ -73,7 +73,7 @@ import org.apache.hadoop.hbase.TableName;
*/
@InterfaceAudience.Private
public class FSTableDescriptors implements TableDescriptors {
- private static final Log LOG = LogFactory.getLog(FSTableDescriptors.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSTableDescriptors.class);
private final FileSystem fs;
private final Path rootdir;
private final boolean fsreadonly;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 81fcaf201f..1620fd8bee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -55,8 +55,6 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -74,6 +72,8 @@ import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.HFileLink;
@@ -100,7 +100,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Private
public abstract class FSUtils extends CommonFSUtils {
- private static final Log LOG = LogFactory.getLog(FSUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);
private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";
private static final int DEFAULT_THREAD_POOLSIZE = 2;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java
index 353f1c7a25..24cd223471 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.util;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -34,7 +34,7 @@ import org.apache.hadoop.fs.PathFilter;
*/
@InterfaceAudience.Private
public final class FSVisitor {
- private static final Log LOG = LogFactory.getLog(FSVisitor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSVisitor.class);
public interface StoreFileVisitor {
void storeFile(final String region, final String family, final String hfileName)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index aab3b36e67..d2e34dd29f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -38,6 +38,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.SortedMap;
@@ -61,8 +62,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -141,7 +140,8 @@ import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
@@ -223,7 +223,7 @@ public class HBaseFsck extends Configured implements Closeable {
/**********************
* Internal resources
**********************/
- private static final Log LOG = LogFactory.getLog(HBaseFsck.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());
private ClusterStatus status;
private ClusterConnection connection;
private Admin admin;
@@ -805,7 +805,7 @@ public class HBaseFsck extends Configured implements Closeable {
cleanupHbckZnode();
unlockHbck();
} catch (Exception io) {
- LOG.warn(io);
+ LOG.warn(io.toString(), io);
} finally {
if (zkw != null) {
zkw.close();
@@ -907,11 +907,11 @@ public class HBaseFsck extends Configured implements Closeable {
errors.reportError(ERROR_CODE.BOUNDARIES_ERROR, "Found issues with regions boundaries",
tablesInfo.get(regionInfo.getTable()));
LOG.warn("Region's boundaries not aligned between stores and META for:");
- LOG.warn(currentRegionBoundariesInformation);
+ LOG.warn(Objects.toString(currentRegionBoundariesInformation));
}
}
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
@@ -1597,7 +1597,7 @@ public class HBaseFsck extends Configured implements Closeable {
// populate meta
List puts = generatePuts(tablesInfo);
if (puts == null) {
- LOG.fatal("Problem encountered when creating new hbase:meta entries. " +
+ LOG.error("Problem encountered when creating new hbase:meta entries. " +
"You may need to restore the previously sidelined hbase:meta");
return false;
}
@@ -1791,7 +1791,7 @@ public class HBaseFsck extends Configured implements Closeable {
try {
sidelineTable(fs, TableName.META_TABLE_NAME, hbaseDir, backupDir);
} catch (IOException e) {
- LOG.fatal("... failed to sideline meta. Currently in inconsistent state. To restore "
+ LOG.error("... failed to sideline meta. Currently in inconsistent state. To restore "
+ "try to rename hbase:meta in " + backupDir.getName() + " to "
+ hbaseDir.getName() + ".", e);
throw e; // throw original exception
@@ -1882,7 +1882,7 @@ public class HBaseFsck extends Configured implements Closeable {
* Record the location of the hbase:meta region as found in ZooKeeper.
*/
private boolean recordMetaRegion() throws IOException {
- RegionLocations rl = ((ClusterConnection)connection).locateRegion(TableName.META_TABLE_NAME,
+ RegionLocations rl = connection.locateRegion(TableName.META_TABLE_NAME,
HConstants.EMPTY_START_ROW, false, false);
if (rl == null) {
errors.reportError(ERROR_CODE.NULL_META_REGION,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java
index afb6c5b0e1..b8811c7ce9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java
@@ -24,8 +24,6 @@ import java.util.EnumSet;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ClusterStatus.Option;
@@ -46,6 +44,8 @@ import org.apache.hadoop.hbase.master.ServerManager;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class contains helper methods that repair parts of hbase's filesystem
@@ -53,7 +53,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class HBaseFsckRepair {
- private static final Log LOG = LogFactory.getLog(HBaseFsckRepair.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseFsckRepair.class);
/**
* Fix multiple assignment by doing silent closes on each RS hosting the region
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
index b88c0e63f6..00410af2fa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
@@ -24,9 +24,9 @@ import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoordinatedStateManager;
import org.apache.hadoop.hbase.master.HMaster;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer;
*/
@InterfaceAudience.Private
public class JVMClusterUtil {
- private static final Log LOG = LogFactory.getLog(JVMClusterUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JVMClusterUtil.class);
/**
* Datastructure to hold RegionServer Thread and RegionServer instance
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java
index 765edf93c4..202b9fb2d0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java
@@ -24,9 +24,9 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.JvmPauseMonitorSource;
import org.apache.hadoop.conf.Configuration;
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
*/
@InterfaceAudience.Private
public class JvmPauseMonitor {
- private static final Log LOG = LogFactory.getLog(JvmPauseMonitor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JvmPauseMonitor.class);
/** The target sleep time */
private static final long SLEEP_INTERVAL_MS = 500;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java
index fe33c24d99..1c860b42e9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java
@@ -32,8 +32,6 @@ import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
@@ -42,13 +40,15 @@ import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility methods for interacting with the regions.
*/
@InterfaceAudience.Private
public abstract class ModifyRegionUtils {
- private static final Log LOG = LogFactory.getLog(ModifyRegionUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ModifyRegionUtils.class);
private ModifyRegionUtils() {
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java
index 1f19848317..58057932bd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java
@@ -27,12 +27,12 @@ import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
*/
@InterfaceAudience.Private
public class MultiHConnection {
- private static final Log LOG = LogFactory.getLog(MultiHConnection.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiHConnection.class);
private Connection[] connections;
private final Object connectionsLock = new Object();
private final int noOfConnections;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java
index 2dc1fe9c93..711507b0f6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java
@@ -43,8 +43,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus.Option;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -64,6 +62,8 @@ import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tool for loading/unloading regions to/from given regionserver This tool can be run from Command
@@ -82,7 +82,7 @@ public class RegionMover extends AbstractHBaseTool {
public static final int DEFAULT_MOVE_RETRIES_MAX = 5;
public static final int DEFAULT_MOVE_WAIT_MAX = 60;
public static final int DEFAULT_SERVERSTART_WAIT_MAX = 180;
- static final Log LOG = LogFactory.getLog(RegionMover.class);
+ static final Logger LOG = LoggerFactory.getLogger(RegionMover.class);
private RegionMoverBuilder rmbuilder;
private boolean ack = true;
private int maxthreads = 1;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java
index 91be6e8498..e41882fb37 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java
@@ -26,9 +26,9 @@ import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
@@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.TreeMultimap;
*/
@InterfaceAudience.Private
public class RegionSplitCalculator {
- private static final Log LOG = LogFactory.getLog(RegionSplitCalculator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionSplitCalculator.class);
private final Comparator rangeCmp;
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
index 06bccd13ab..5f480a5eee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
@@ -37,8 +37,6 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -56,6 +54,8 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
@@ -145,7 +145,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
*/
@InterfaceAudience.Private
public class RegionSplitter {
- private static final Log LOG = LogFactory.getLog(RegionSplitter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionSplitter.class);
/**
* A generic interface for the RegionSplitter code to use for all it's
@@ -434,7 +434,7 @@ public class RegionSplitter {
* Alternative getCurrentNrHRS which is no longer available.
* @param connection
* @return Rough count of regionservers out on cluster.
- * @throws IOException
+ * @throws IOException
*/
private static int getRegionServerCount(final Connection connection) throws IOException {
try (Admin admin = connection.getAdmin()) {
@@ -729,7 +729,7 @@ public class RegionSplitter {
}
} catch (NoServerForRegionException nsfre) {
// NSFRE will occur if the old hbase:meta entry has no server assigned
- LOG.info(nsfre);
+ LOG.info(nsfre.toString(), nsfre);
logicalSplitting.add(region);
continue;
}
@@ -785,7 +785,7 @@ public class RegionSplitter {
* @param conf
* @param tableName
* @return A Pair where first item is table dir and second is the split file.
- * @throws IOException
+ * @throws IOException
*/
private static Pair getTableDirAndSplitFile(final Configuration conf,
final TableName tableName)
@@ -803,7 +803,7 @@ public class RegionSplitter {
getTableDirAndSplitFile(connection.getConfiguration(), tableName);
Path tableDir = tableDirAndSplitFile.getFirst();
Path splitFile = tableDirAndSplitFile.getSecond();
-
+
FileSystem fs = tableDir.getFileSystem(connection.getConfiguration());
// Using strings because (new byte[]{0}).equals(new byte[]{0}) == false
@@ -949,6 +949,7 @@ public class RegionSplitter {
this.rowComparisonLength = lastRow.length();
}
+ @Override
public byte[] split(byte[] start, byte[] end) {
BigInteger s = convertToBigInteger(start);
BigInteger e = convertToBigInteger(end);
@@ -956,6 +957,7 @@ public class RegionSplitter {
return convertToByte(split2(s, e));
}
+ @Override
public byte[][] split(int n) {
Preconditions.checkArgument(lastRowInt.compareTo(firstRowInt) > 0,
"last row (%s) is configured less than first row (%s)", lastRow,
@@ -1009,19 +1011,23 @@ public class RegionSplitter {
}
}
+ @Override
public byte[] firstRow() {
return convertToByte(firstRowInt);
}
+ @Override
public byte[] lastRow() {
return convertToByte(lastRowInt);
}
+ @Override
public void setFirstRow(String userInput) {
firstRow = userInput;
firstRowInt = new BigInteger(firstRow, radix);
}
+ @Override
public void setLastRow(String userInput) {
lastRow = userInput;
lastRowInt = new BigInteger(lastRow, radix);
@@ -1029,14 +1035,17 @@ public class RegionSplitter {
rowComparisonLength = lastRow.length();
}
+ @Override
public byte[] strToRow(String in) {
return convertToByte(new BigInteger(in, radix));
}
+ @Override
public String rowToStr(byte[] row) {
return Bytes.toStringBinary(row);
}
+ @Override
public String separator() {
return " ";
}
@@ -1130,6 +1139,7 @@ public class RegionSplitter {
byte[] firstRowBytes = ArrayUtils.EMPTY_BYTE_ARRAY;
byte[] lastRowBytes =
new byte[] {xFF, xFF, xFF, xFF, xFF, xFF, xFF, xFF};
+ @Override
public byte[] split(byte[] start, byte[] end) {
return Bytes.split(start, end, 1)[1];
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
index 4175526ec6..83ec5ffc63 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
@@ -26,9 +26,9 @@ import java.util.Locale;
import java.util.Map.Entry;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -40,7 +40,7 @@ import org.apache.hadoop.util.ToolRunner;
*/
@InterfaceAudience.Private
public abstract class ServerCommandLine extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(ServerCommandLine.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ServerCommandLine.class);
@SuppressWarnings("serial")
private static final Set DEFAULT_SKIP_WORDS = new HashSet() {
{
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
index 9b61b8b410..fe514d8c1c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.util;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -37,13 +35,15 @@ import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.regionserver.RegionReplicaReplicationEndpoint;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Similar to {@link RegionReplicaUtil} but for the server side
*/
public class ServerRegionReplicaUtil extends RegionReplicaUtil {
- private static final Log LOG = LogFactory.getLog(ServerRegionReplicaUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ServerRegionReplicaUtil.class);
/**
* Whether asynchronous WAL replication to the secondary region replicas is enabled or not.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java
index 11327e8baa..b22b4ff40b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java
@@ -22,8 +22,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* utlity method to migrate zookeeper data across HBase versions.
@@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Private
public class ZKDataMigrator {
- private static final Log LOG = LogFactory.getLog(ZKDataMigrator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKDataMigrator.class);
/**
* Method for table states migration.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
index 44bbb38ca5..e937fa529a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
@@ -31,9 +31,9 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -60,7 +60,7 @@ import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;
*/
@InterfaceAudience.Private
public class HFileCorruptionChecker {
- private static final Log LOG = LogFactory.getLog(HFileCorruptionChecker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileCorruptionChecker.class);
final Configuration conf;
final FileSystem fs;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
index c208d8aa3a..534b948bfe 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.util.hbck;
import java.io.IOException;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -46,7 +46,7 @@ import org.apache.hadoop.io.MultipleIOException;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
@InterfaceStability.Evolving
public class OfflineMetaRepair {
- private static final Log LOG = LogFactory.getLog(OfflineMetaRepair.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(OfflineMetaRepair.class.getName());
protected static void printUsageAndExit() {
StringBuilder sb = new StringBuilder();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
index aba13c658f..8bd9a3086a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
@@ -25,8 +25,6 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.util.CancelableProgressable;
@@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceStability.Evolving
public abstract class AbstractFSWALProvider> implements WALProvider {
- private static final Log LOG = LogFactory.getLog(AbstractFSWALProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractFSWALProvider.class);
/** Separate old log into different dir by regionserver name **/
public static final String SEPARATE_OLDLOGDIR = "hbase.separate.oldlogdir.by.regionserver";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java
index 5cb01899db..8bb1802e6b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.wal;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -31,7 +29,8 @@ import org.apache.hadoop.hbase.util.CommonFSUtils.StreamLacksCapabilityException
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
@@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.DefaultThreadFact
@InterfaceStability.Evolving
public class AsyncFSWALProvider extends AbstractFSWALProvider {
- private static final Log LOG = LogFactory.getLog(AsyncFSWALProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncFSWALProvider.class);
// Only public so classes back in regionserver.wal can access
public interface AsyncWriter extends WALProvider.AsyncWriter {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
index cedf3509f5..280d95fec3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
@@ -26,8 +26,6 @@ import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// imports for things that haven't moved from regionserver.wal yet.
@@ -50,7 +50,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
class DisabledWALProvider implements WALProvider {
- private static final Log LOG = LogFactory.getLog(DisabledWALProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DisabledWALProvider.class);
WAL disabled;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java
index b72e66841e..14505a8a9c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java
@@ -20,14 +20,14 @@ package org.apache.hadoop.hbase.wal;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// imports for things that haven't moved from regionserver.wal yet.
import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.CommonFSUtils;
@InterfaceStability.Evolving
public class FSHLogProvider extends AbstractFSWALProvider {
- private static final Log LOG = LogFactory.getLog(FSHLogProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSHLogProvider.class);
// Only public so classes back in regionserver.wal can access
public interface Writer extends WALProvider.Writer {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
index ab3a7d9419..b8c9484ab3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
@@ -28,10 +28,10 @@ import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// imports for classes still in regionserver.wal
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.util.Bytes;
@@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.util.IdLock;
*/
@InterfaceAudience.Private
public class RegionGroupingProvider implements WALProvider {
- private static final Log LOG = LogFactory.getLog(RegionGroupingProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionGroupingProvider.class);
/**
* Map identifiers to a group number.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java
index f5b611bae8..c909e905da 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.wal;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -35,7 +33,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -55,7 +54,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe
@InterfaceAudience.LimitedPrivate({ HBaseInterfaceAudience.REPLICATION,
HBaseInterfaceAudience.COPROC })
public class WALEdit implements HeapSize {
- private static final Log LOG = LogFactory.getLog(WALEdit.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALEdit.class);
// TODO: Get rid of this; see HBASE-8457
public static final byte [] METAFAMILY = Bytes.toBytes("METAFAMILY");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
index 5855419683..0628f8652f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
@@ -29,12 +29,12 @@ import java.util.List;
import java.util.OptionalLong;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// imports for things that haven't moved from regionserver.wal yet.
import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
@@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.wal.WALProvider.Writer;
@InterfaceAudience.Private
public class WALFactory implements WALFileLengthProvider {
- private static final Log LOG = LogFactory.getLog(WALFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALFactory.class);
/**
* Maps between configuration names for providers and implementation classes.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index ce1713a127..2427588fd3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -52,8 +52,6 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileStatus;
@@ -97,6 +95,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class is responsible for splitting up a bunch of regionserver commit log
@@ -105,7 +105,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class WALSplitter {
- private static final Log LOG = LogFactory.getLog(WALSplitter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALSplitter.class);
/** By default we retry errors in splitting, rather than skipping. */
public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false;
@@ -1538,7 +1538,7 @@ public class WALSplitter {
} catch (IOException e) {
e = e instanceof RemoteException ?
((RemoteException)e).unwrapRemoteException() : e;
- LOG.fatal(" Got while writing log entry to log", e);
+ LOG.error(" Got while writing log entry to log", e);
throw e;
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java
index 6a214385e6..37f3279dac 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java
@@ -28,8 +28,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
@@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@InterfaceAudience.Private
public class AcidGuaranteesTestTool extends AbstractHBaseTool {
- private static final Log LOG = LogFactory.getLog(AcidGuaranteesTestTool.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AcidGuaranteesTestTool.class);
public static final TableName TABLE_NAME = TableName.valueOf("TestAcidGuarantees");
public static final byte[] FAMILY_A = Bytes.toBytes("A");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java
index 08565e07af..2dc1aeb91c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase;
import java.io.File;
import java.io.IOException;
-import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
@@ -32,21 +31,16 @@ import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Supplier;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.hadoop.util.Time;
-import org.apache.log4j.Layout;
import org.apache.log4j.Logger;
-import org.apache.log4j.WriterAppender;
import org.junit.Assert;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Supplier;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-
/**
* Test provides some very generic helpers which might be used across the tests
*/
@@ -117,42 +111,12 @@ public abstract class GenericTestUtils {
TimedOutTestsListener.buildThreadDiagnosticString());
}
- public static class LogCapturer {
- private StringWriter sw = new StringWriter();
- private WriterAppender appender;
- private Logger logger;
-
- public static LogCapturer captureLogs(Log l) {
- Logger logger = ((Log4JLogger)l).getLogger();
- LogCapturer c = new LogCapturer(logger);
- return c;
- }
-
-
- private LogCapturer(Logger logger) {
- this.logger = logger;
- Layout layout = Logger.getRootLogger().getAppender("stdout").getLayout();
- WriterAppender wa = new WriterAppender(layout, sw);
- logger.addAppender(wa);
- }
-
- public String getOutput() {
- return sw.toString();
- }
-
- public void stopCapturing() {
- logger.removeAppender(appender);
-
- }
- }
-
-
/**
* Mockito answer helper that triggers one latch as soon as the
* method is called, then waits on another before continuing.
*/
public static class DelayAnswer implements Answer