@@ -59,7 +60,7 @@ import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
@InterfaceStability.Evolving
public class BufferedMutatorImpl implements BufferedMutator {
- private static final Log LOG = LogFactory.getLog(BufferedMutatorImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BufferedMutatorImpl.class);
private final ExceptionListener listener;
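For reference, a minimal sketch (not part of the patch) of the logger-declaration change this series applies in each file, assuming SLF4J is on the classpath; ExampleClient is a made-up class name:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleClient {
      // Before: private static final Log LOG = LogFactory.getLog(ExampleClient.class);
      private static final Logger LOG = LoggerFactory.getLogger(ExampleClient.class);

      void connect(String host) {
        // SLF4J placeholders defer message construction until the level is actually enabled.
        LOG.debug("Connecting to {}", host);
      }
    }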
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java
index d48462f974..d4b4b4a902 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.client;
import java.io.IOException;
import java.lang.management.ManagementFactory;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Addressing;
import org.apache.hadoop.hbase.util.Bytes;
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@InterfaceAudience.Private
final class ClientIdGenerator {
- private static final Log LOG = LogFactory.getLog(ClientIdGenerator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClientIdGenerator.class);
private ClientIdGenerator() {}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
index 738f095c01..a0f7ad8cfa 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
@@ -30,8 +30,6 @@ import java.util.Queue;
import java.util.concurrent.ExecutorService;
import org.apache.commons.lang3.mutable.MutableBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -41,6 +39,8 @@ import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ScannerCallable.MoreResults;
import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
import org.apache.hadoop.hbase.exceptions.ScannerResetException;
@@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public abstract class ClientScanner extends AbstractClientScanner {
- private static final Log LOG = LogFactory.getLog(ClientScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClientScanner.class);
protected final Scan scan;
protected boolean closed = false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java
index 7f20436705..5ff1b67c61 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java
@@ -31,8 +31,6 @@ import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.util.Addressing;
import org.apache.hadoop.hbase.util.ExceptionUtil;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.io.netty.bootstrap.Bootstrap;
import org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufInputStream;
import org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext;
@@ -63,7 +62,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
*/
@InterfaceAudience.Private
class ClusterStatusListener implements Closeable {
- private static final Log LOG = LogFactory.getLog(ClusterStatusListener.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClusterStatusListener.class);
  private final List<ServerName> deadServers = new ArrayList<>();
protected final DeadServerHandler deadServerHandler;
private final Listener listener;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index 5d71d9cb58..1f34dba5b4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -49,8 +49,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CallQueueTooBigException;
import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -71,6 +69,7 @@ import org.apache.hadoop.hbase.exceptions.RegionMovedException;
import org.apache.hadoop.hbase.ipc.RpcClient;
import org.apache.hadoop.hbase.ipc.RpcClientFactory;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
@@ -82,7 +81,8 @@ import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
@@ -138,7 +138,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.Updat
@InterfaceAudience.Private
class ConnectionImplementation implements ClusterConnection, Closeable {
public static final String RETRIES_BY_SERVER_KEY = "hbase.client.retries.by.server";
- private static final Log LOG = LogFactory.getLog(ConnectionImplementation.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ConnectionImplementation.class);
private static final String RESOLVE_HOSTNAME_ON_FAIL_KEY = "hbase.resolve.hostnames.on.failure";
@@ -1882,9 +1882,9 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
@Override
public void abort(final String msg, Throwable t) {
if (t != null) {
- LOG.fatal(msg, t);
+ LOG.error(HBaseMarkers.FATAL, msg, t);
} else {
- LOG.fatal(msg);
+ LOG.error(HBaseMarkers.FATAL, msg);
}
this.aborted = true;
close();
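SLF4J defines no FATAL level, so the hunk above rewrites LOG.fatal(...) as LOG.error(...) tagged with a marker. A minimal standalone sketch of that pattern, using MarkerFactory directly in place of the HBaseMarkers.FATAL constant imported earlier in this file's diff:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    public class FatalLoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(FatalLoggingSketch.class);
      // Stands in for org.apache.hadoop.hbase.log.HBaseMarkers.FATAL used by the patch.
      private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

      void abort(String msg, Throwable t) {
        if (t != null) {
          LOG.error(FATAL, msg, t); // was LOG.fatal(msg, t) under commons-logging
        } else {
          LOG.error(FATAL, msg);    // was LOG.fatal(msg)
        }
      }
    }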
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
index e27bf71850..f3489e1353 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
@@ -32,8 +32,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
@@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.DNS;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
@@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterServ
@InterfaceAudience.Private
public final class ConnectionUtils {
- private static final Log LOG = LogFactory.getLog(ConnectionUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ConnectionUtils.class);
private ConnectionUtils() {
}
@@ -110,7 +109,7 @@ public final class ConnectionUtils {
* @param log Used to log what we set in here.
*/
public static void setServerSideHConnectionRetriesConfig(final Configuration c, final String sn,
- final Log log) {
+ final Logger log) {
// TODO: Fix this. Not all connections from server side should have 10 times the retries.
int hcRetries = c.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelayingRunner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelayingRunner.java
index 05f2511731..8ab5d850d2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelayingRunner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelayingRunner.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.client;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import java.util.List;
@@ -40,7 +40,7 @@ import java.util.Map;
*/
@InterfaceAudience.Private
public class DelayingRunner implements Runnable {
- private static final Log LOG = LogFactory.getLog(DelayingRunner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DelayingRunner.class);
private final Object sleepLock = new Object();
private boolean triggerWake = false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java
index 442bf1d270..bb265a43f6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java
@@ -20,15 +20,14 @@ package org.apache.hadoop.hbase.client;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
@@ -38,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegion
*/
@InterfaceAudience.Private
public class FlushRegionCallable extends RegionAdminServiceCallable<FlushRegionResponse> {
- private static final Log LOG = LogFactory.getLog(FlushRegionCallable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushRegionCallable.class);
private final byte[] regionName;
private final boolean writeFlushWalMarker;
private boolean reload;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
index 059a5fd1ea..80b8a221d3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
@@ -30,10 +30,10 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.security.access.Permission;
@@ -66,7 +66,7 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Public
public class Get extends Query
implements Row, Comparable<Row> {
- private static final Log LOG = LogFactory.getLog(Get.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Get.class);
private byte [] row = null;
private int maxVersions = 1;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 207d28b912..600ee69614 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -46,8 +46,6 @@ import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.CacheEvictionStats;
@@ -103,6 +101,8 @@ import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -223,7 +223,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class HBaseAdmin implements Admin {
- private static final Log LOG = LogFactory.getLog(HBaseAdmin.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseAdmin.class);
private ClusterConnection connection;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index 939398fc2e..8d15140da8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -26,8 +26,6 @@ import com.google.protobuf.Message;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CompareOperator;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
import org.apache.hadoop.hbase.filter.BinaryComparator;
@@ -103,7 +103,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
@InterfaceAudience.Private
@InterfaceStability.Stable
public class HTable implements Table {
- private static final Log LOG = LogFactory.getLog(HTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HTable.class);
private static final Consistency DEFAULT_CONSISTENCY = Consistency.STRONG;
private final ClusterConnection connection;
private final TableName tableName;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
index 77d4fb2923..a33fd1d9b6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
@@ -35,8 +35,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@@ -46,7 +44,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -66,7 +65,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
*/
@InterfaceAudience.Public
public class HTableMultiplexer {
- private static final Log LOG = LogFactory.getLog(HTableMultiplexer.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(HTableMultiplexer.class.getName());
public static final String TABLE_MULTIPLEXER_FLUSH_PERIOD_MS =
"hbase.tablemultiplexer.flush.period.ms";
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java
index 6dc46d39d0..a3677890d4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java
@@ -27,8 +27,6 @@ import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.CopyOnWriteArraySet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.RegionLocations;
@@ -37,6 +35,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.types.CopyOnWriteArrayMap;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A cache implementation for region locations from meta.
@@ -44,7 +44,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class MetaCache {
- private static final Log LOG = LogFactory.getLog(MetaCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetaCache.class);
/**
* Map of table to table {@link HRegionLocation}s.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java
index edcc8d6cf7..e38d8faca0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java
@@ -27,12 +27,12 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.commons.lang3.mutable.MutableBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
import org.apache.hadoop.hbase.exceptions.PreemptiveFastFailException;
import org.apache.hadoop.hbase.ipc.CallTimeoutException;
@@ -66,8 +66,8 @@ import org.apache.hadoop.ipc.RemoteException;
@InterfaceAudience.Private
class PreemptiveFastFailInterceptor extends RetryingCallerInterceptor {
- private static final Log LOG = LogFactory
- .getLog(PreemptiveFastFailInterceptor.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(PreemptiveFastFailInterceptor.class);
// amount of time to wait before we consider a server to be in fast fail
// mode
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
index 5e9356a1fe..bb427b1b49 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
@@ -45,8 +45,6 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
import org.apache.hadoop.hbase.ClusterStatus;
@@ -87,7 +85,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
import org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer;
import org.apache.hadoop.hbase.shaded.io.netty.util.Timeout;
@@ -270,7 +269,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
class RawAsyncHBaseAdmin implements AsyncAdmin {
public static final String FLUSH_TABLE_PROCEDURE_SIGNATURE = "flush-table-proc";
- private static final Log LOG = LogFactory.getLog(AsyncHBaseAdmin.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncHBaseAdmin.class);
private final AsyncConnectionImpl connection;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java
index ee954379fb..448302c854 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java
@@ -19,11 +19,11 @@ package org.apache.hadoop.hbase.client;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
@@ -43,7 +43,7 @@ import com.google.protobuf.RpcController;
*/
@InterfaceAudience.Private
class RegionCoprocessorRpcChannel extends SyncCoprocessorRpcChannel {
- private static final Log LOG = LogFactory.getLog(RegionCoprocessorRpcChannel.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionCoprocessorRpcChannel.class);
private final TableName table;
private final byte [] row;
private final ClusterConnection conn;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
index e17e307205..223b6fd3da 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
@@ -21,16 +21,16 @@ package org.apache.hadoop.hbase.client;
import java.util.Arrays;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class RegionInfoBuilder {
- private static final Log LOG = LogFactory.getLog(RegionInfoBuilder.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionInfoBuilder.class);
/** A non-capture group so that this can be embedded. */
public static final String ENCODED_REGION_NAME_REGEX = "(?:[a-f0-9]+)";
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java
index b05ad64146..70d32d5140 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java
@@ -26,10 +26,10 @@ import java.util.concurrent.RunnableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
/**
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
*/
@InterfaceAudience.Private
public class ResultBoundedCompletionService<V> {
- private static final Log LOG = LogFactory.getLog(ResultBoundedCompletionService.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ResultBoundedCompletionService.class);
private final RpcRetryingCallerFactory retryingCallerFactory;
private final Executor executor;
private final QueueingFuture<V>[] tasks; // all the tasks
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetryingCallerInterceptorFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetryingCallerInterceptorFactory.java
index b5287a785e..838e8fc695 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetryingCallerInterceptorFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetryingCallerInterceptorFactory.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.client;
import java.lang.reflect.Constructor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Factory implementation to provide the {@link ConnectionImplementation} with
@@ -35,8 +35,8 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
class RetryingCallerInterceptorFactory {
- private static final Log LOG = LogFactory
- .getLog(RetryingCallerInterceptorFactory.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(RetryingCallerInterceptorFactory.class);
private Configuration conf;
private final boolean failFast;
public static final RetryingCallerInterceptor NO_OP_INTERCEPTOR =
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java
index d03fe9fdbc..e7a3d18010 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java
@@ -17,12 +17,12 @@
*/
package org.apache.hadoop.hbase.client;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.ReflectionUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Factory to create an {@link RpcRetryingCaller}
@@ -32,7 +32,7 @@ public class RpcRetryingCallerFactory {
/** Configuration key for a custom {@link RpcRetryingCaller} */
public static final String CUSTOM_CALLER_CONF_KEY = "hbase.rpc.callerfactory.class";
- private static final Log LOG = LogFactory.getLog(RpcRetryingCallerFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RpcRetryingCallerFactory.class);
protected final Configuration conf;
private final long pause;
private final long pauseForCQTBE;// pause for CallQueueTooBigException, if specified
@@ -47,7 +47,7 @@ public class RpcRetryingCallerFactory {
public RpcRetryingCallerFactory(Configuration conf) {
this(conf, RetryingCallerInterceptorFactory.NO_OP_INTERCEPTOR);
}
-
+
public RpcRetryingCallerFactory(Configuration conf, RetryingCallerInterceptor interceptor) {
this.conf = conf;
pause = conf.getLong(HConstants.HBASE_CLIENT_PAUSE,
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
index 524281804c..7d0e9a05de 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
@@ -28,8 +28,7 @@ import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.CallQueueTooBigException;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.exceptions.PreemptiveFastFailException;
@@ -38,7 +37,8 @@ import org.apache.hadoop.hbase.util.ExceptionUtil;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
/**
@@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
@InterfaceAudience.Private
public class RpcRetryingCallerImpl<T> implements RpcRetryingCaller<T> {
// LOG is being used in TestMultiRowRangeFilter, hence leaving it public
- public static final Log LOG = LogFactory.getLog(RpcRetryingCallerImpl.class);
+ public static final Logger LOG = LoggerFactory.getLogger(RpcRetryingCallerImpl.class);
/** How many retries are allowed before we start to log */
private final int startLogErrorsCnt;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
index c6ba228a78..4a31cff4a7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
@@ -28,8 +28,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseIOException;
@@ -37,6 +35,8 @@ import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.RegionLocations;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -54,7 +54,9 @@ import static org.apache.hadoop.hbase.HConstants.PRIORITY_UNSET;
*/
@InterfaceAudience.Private
public class RpcRetryingCallerWithReadReplicas {
- private static final Log LOG = LogFactory.getLog(RpcRetryingCallerWithReadReplicas.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(RpcRetryingCallerWithReadReplicas.class);
+
protected final ExecutorService pool;
protected final ClusterConnection cConnection;
protected final Configuration conf;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 266785854e..7139b26da9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -29,10 +29,10 @@ import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
@@ -87,7 +87,7 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@InterfaceAudience.Public
public class Scan extends Query {
- private static final Log LOG = LogFactory.getLog(Scan.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Scan.class);
private static final String RAW_ATTR = "_raw_";
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
index 87a05d68b9..45b74ef938 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
@@ -27,8 +27,6 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.updateServerSideMet
import java.io.IOException;
import java.io.InterruptedIOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseIOException;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.exceptions.ScannerResetException;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
@@ -62,7 +62,7 @@ public class ScannerCallable extends ClientServiceCallable<Result[]> {
public static final String LOG_SCANNER_ACTIVITY = "hbase.client.log.scanner.activity";
// Keeping LOG public as it is being used in TestScannerHeartbeatMessages
- public static final Log LOG = LogFactory.getLog(ScannerCallable.class);
+ public static final Logger LOG = LoggerFactory.getLogger(ScannerCallable.class);
protected long scannerId = -1L;
protected boolean instantiated = false;
protected boolean closed = false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
index 9dd1052c2b..3cf377b895 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
@@ -32,14 +32,14 @@ import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.RegionLocations;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ScannerCallable.MoreResults;
import org.apache.hadoop.hbase.util.Pair;
@@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.util.Pair;
*/
@InterfaceAudience.Private
class ScannerCallableWithReplicas implements RetryingCallable<Result[]> {
- private static final Log LOG = LogFactory.getLog(ScannerCallableWithReplicas.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ScannerCallableWithReplicas.class);
volatile ScannerCallable currentScannerCallable;
AtomicBoolean replicaSwitched = new AtomicBoolean(false);
final ClusterConnection cConnection;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java
index ddcfe0b169..023188043c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java
@@ -34,8 +34,7 @@ import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -43,6 +42,8 @@ import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
import org.apache.hadoop.hbase.util.EnvironmentEdge;
@@ -54,7 +55,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.Private
@InterfaceStability.Evolving
class SimpleRequestController implements RequestController {
- private static final Log LOG = LogFactory.getLog(SimpleRequestController.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleRequestController.class);
/**
* The maximum heap size for each request.
*/
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java
index 36d2b31173..6b4419d538 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java
@@ -25,9 +25,9 @@ import com.google.protobuf.ServiceException;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
*/
@InterfaceAudience.Public
abstract class SyncCoprocessorRpcChannel implements CoprocessorRpcChannel {
- private static final Log LOG = LogFactory.getLog(SyncCoprocessorRpcChannel.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SyncCoprocessorRpcChannel.class);
@Override
@InterfaceAudience.Private
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
index 5c4f7210c9..9f40ae6c13 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
@@ -34,8 +34,6 @@ import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.stream.Stream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HConstants;
@@ -46,13 +44,15 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* @since 2.0.0
*/
@InterfaceAudience.Public
public class TableDescriptorBuilder {
- public static final Log LOG = LogFactory.getLog(TableDescriptorBuilder.class);
+ public static final Logger LOG = LoggerFactory.getLogger(TableDescriptorBuilder.class);
@InterfaceAudience.Private
public static final String SPLIT_POLICY = "SPLIT_POLICY";
private static final Bytes SPLIT_POLICY_KEY = new Bytes(Bytes.toBytes(SPLIT_POLICY));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ZKAsyncRegistry.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ZKAsyncRegistry.java
index bd8325e9d9..200d24dc2d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ZKAsyncRegistry.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ZKAsyncRegistry.java
@@ -28,8 +28,6 @@ import java.io.IOException;
import java.util.concurrent.CompletableFuture;
import org.apache.commons.lang3.mutable.MutableInt;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterId;
import org.apache.hadoop.hbase.HRegionLocation;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ReadOnlyZKClient;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos;
@@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos;
@InterfaceAudience.Private
class ZKAsyncRegistry implements AsyncRegistry {
- private static final Log LOG = LogFactory.getLog(ZKAsyncRegistry.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKAsyncRegistry.class);
private final ReadOnlyZKClient zk;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java
index 14c9c8abe9..0358947237 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java
@@ -17,19 +17,19 @@
*/
package org.apache.hadoop.hbase.client.backoff;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.ReflectionUtils;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class ClientBackoffPolicyFactory {
- private static final Log LOG = LogFactory.getLog(ClientBackoffPolicyFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClientBackoffPolicyFactory.class);
private ClientBackoffPolicyFactory() {
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
index 53d67762e9..6bd3fb5b15 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
@@ -17,13 +17,12 @@
*/
package org.apache.hadoop.hbase.client.backoff;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/**
@@ -33,7 +32,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Public
public class ExponentialClientBackoffPolicy implements ClientBackoffPolicy {
- private static final Log LOG = LogFactory.getLog(ExponentialClientBackoffPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExponentialClientBackoffPolicy.class);
private static final long ONE_MINUTE = 60 * 1000;
public static final long DEFAULT_MAX_BACKOFF = 5 * ONE_MINUTE;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
index 5a5913c06a..a5081cb7be 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
@@ -30,13 +30,13 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -72,7 +72,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@InterfaceAudience.Public
@Deprecated
public class ReplicationAdmin implements Closeable {
- private static final Log LOG = LogFactory.getLog(ReplicationAdmin.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationAdmin.class);
public static final String TNAME = "tableName";
public static final String CFNAME = "columnFamilyName";
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
index d8c86f09d4..ec80eca0f5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
@@ -18,37 +18,38 @@
*/
package org.apache.hadoop.hbase.client.replication;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Strings;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.ArrayList;
-import java.util.Set;
-import java.util.stream.Collectors;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
/**
* Helper for TableCFs Operations.
@@ -57,7 +58,7 @@ import java.util.stream.Collectors;
@InterfaceStability.Stable
public final class ReplicationPeerConfigUtil {
- private static final Log LOG = LogFactory.getLog(ReplicationPeerConfigUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeerConfigUtil.class);
private ReplicationPeerConfigUtil() {}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionMovedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionMovedException.java
index 07925804f2..e79c138e70 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionMovedException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionMovedException.java
@@ -17,13 +17,13 @@
*/
package org.apache.hadoop.hbase.exceptions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Subclass if the server knows the region is now on another server.
@@ -32,7 +32,7 @@ import org.apache.yetus.audience.InterfaceStability;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class RegionMovedException extends NotServingRegionException {
- private static final Log LOG = LogFactory.getLog(RegionMovedException.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionMovedException.class);
private static final long serialVersionUID = -7232903522310558396L;
private final String hostname;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java
index 5c708e1586..a854b996ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java
@@ -18,11 +18,11 @@
*/
package org.apache.hadoop.hbase.exceptions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Subclass if the server knows the region is now on another server.
@@ -31,7 +31,7 @@ import org.apache.yetus.audience.InterfaceStability;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class RegionOpeningException extends NotServingRegionException {
- private static final Log LOG = LogFactory.getLog(RegionOpeningException.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionOpeningException.class);
private static final long serialVersionUID = -7232903522310558395L;
public RegionOpeningException(String message) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
index 6ebe2fe911..e984212985 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
@@ -31,10 +31,10 @@ import java.util.Map;
import java.util.Set;
import java.util.Stack;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
@@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@InterfaceAudience.Public
public class ParseFilter {
- private static final Log LOG = LogFactory.getLog(ParseFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ParseFilter.class);
private static HashMap<ByteBuffer, Integer> operatorPrecedenceHashMap;
private static HashMap<String, String> filterHashMap;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
index 89cebbee02..d1caaf0216 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
@@ -23,8 +23,6 @@ import java.nio.charset.IllegalCharsetNameException;
import java.util.Arrays;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
@@ -36,7 +34,8 @@ import org.joni.Matcher;
import org.joni.Option;
import org.joni.Regex;
import org.joni.Syntax;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
@@ -72,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
@InterfaceAudience.Public
public class RegexStringComparator extends ByteArrayComparable {
- private static final Log LOG = LogFactory.getLog(RegexStringComparator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegexStringComparator.class);
private Engine engine;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index 0e7f376ff6..2e902af0d7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -49,12 +49,12 @@ import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.MetricsConnection;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.KeyValueCodec;
@@ -94,7 +94,7 @@ import org.apache.hadoop.security.token.TokenSelector;
@InterfaceAudience.Private
public abstract class AbstractRpcClient implements RpcClient {
// Log level is being changed in tests
- public static final Log LOG = LogFactory.getLog(AbstractRpcClient.class);
+ public static final Logger LOG = LoggerFactory.getLogger(AbstractRpcClient.class);
protected static final HashedWheelTimer WHEEL_TIMER = new HashedWheelTimer(
Threads.newDaemonThreadFactory("RpcClient-timer"), 10, TimeUnit.MILLISECONDS);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
index bd761802c6..113babb4b8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
@@ -44,14 +44,14 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ThreadLocalRandom;
import javax.security.sasl.SaslException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.exceptions.ConnectionClosingException;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.security.HBaseSaslRpcClient;
import org.apache.hadoop.hbase.security.SaslUtil;
import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
@@ -65,7 +65,8 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
@@ -85,7 +86,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeade
@InterfaceAudience.Private
class BlockingRpcConnection extends RpcConnection implements Runnable {
- private static final Log LOG = LogFactory.getLog(BlockingRpcConnection.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BlockingRpcConnection.class);
private final BlockingRpcClient rpcClient;
@@ -419,7 +420,7 @@ class BlockingRpcConnection extends RpcConnection implements Runnable {
if (ex instanceof SaslException) {
String msg = "SASL authentication failed."
+ " The most likely cause is missing or invalid credentials." + " Consider 'kinit'.";
- LOG.fatal(msg, ex);
+ LOG.error(HBaseMarkers.FATAL, msg, ex);
throw new RuntimeException(msg, ex);
}
throw new IOException(ex);
@@ -568,8 +569,9 @@ class BlockingRpcConnection extends RpcConnection implements Runnable {
}
waitingConnectionHeaderResponse = false;
} catch (SocketTimeoutException ste) {
- LOG.fatal("Can't get the connection header response for rpc timeout, please check if" +
- " server has the correct configuration to support the additional function.", ste);
+ LOG.error(HBaseMarkers.FATAL, "Can't get the connection header response for rpc timeout, "
+ + "please check if server has the correct configuration to support the additional "
+ + "function.", ste);
// timeout when waiting the connection header response, ignore the additional function
throw new IOException("Timeout while waiting connection header response", ste);
}
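Note on the two hunks above: SLF4J has no fatal() method, so the former LOG.fatal(msg, ex) calls become LOG.error(HBaseMarkers.FATAL, msg, ex), carrying the fatal severity as a marker. A minimal sketch of the same pattern, kept self-contained via SLF4J's MarkerFactory (class and method names are illustrative, not part of the patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    class FatalMarkerSketch {
      // Stand-in for HBaseMarkers.FATAL: a named marker that log configuration can filter on.
      private static final Marker FATAL = MarkerFactory.getMarker("FATAL");
      private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerSketch.class);

      static void reportFatal(String msg, Throwable ex) {
        // SLF4J offers error() at most; the marker records that this is a fatal condition.
        LOG.error(FATAL, msg, ex);
      }
    }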
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java
index 69c9e39d3c..5c6ddbfc35 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java
@@ -29,13 +29,13 @@ import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.io.ByteBuffInputStream;
import org.apache.hadoop.hbase.io.ByteBufferInputStream;
@@ -58,7 +58,7 @@ import org.apache.hadoop.io.compress.Decompressor;
class CellBlockBuilder {
// LOG is being used in TestCellBlockBuilder
- static final Log LOG = LogFactory.getLog(CellBlockBuilder.class);
+ static final Logger LOG = LoggerFactory.getLogger(CellBlockBuilder.class);
private final Configuration conf;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
index 4f0e5e65d0..7eb29f94e3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java
@@ -23,11 +23,11 @@ import static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Regi
import java.io.IOException;
import java.io.InterruptedIOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
@@ -52,7 +52,7 @@ import edu.umd.cs.findbugs.annotations.Nullable;
*/
@InterfaceAudience.Private
public final class CoprocessorRpcUtils {
- private static final Log LOG = LogFactory.getLog(CoprocessorRpcUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CoprocessorRpcUtils.class);
/**
* We assume that all HBase protobuf services share a common package name
* (defined in the .proto files).
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java
index 3cb8f016aa..9b573adb4a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java
@@ -23,10 +23,10 @@ import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
@@ -38,7 +38,7 @@ public class FailedServers {
private final Map failedServers = new HashMap();
private long latestExpiry = 0;
private final int recheckServersTimeout;
- private static final Log LOG = LogFactory.getLog(FailedServers.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FailedServers.class);
public FailedServers(Configuration conf) {
this.recheckServersTimeout = conf.getInt(
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java
index 5a012a1e43..581483a22d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java
@@ -49,9 +49,9 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallEvent;
import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
@@ -70,7 +70,7 @@ import org.apache.hadoop.security.UserGroupInformation;
@InterfaceAudience.Private
class NettyRpcConnection extends RpcConnection {
- private static final Log LOG = LogFactory.getLog(NettyRpcConnection.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NettyRpcConnection.class);
private static final ScheduledExecutorService RELOGIN_EXECUTOR =
Executors.newSingleThreadScheduledExecutor(Threads.newDaemonThreadFactory("Relogin"));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java
index 062255bf47..13be390e15 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java
@@ -34,10 +34,10 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;
@@ -53,7 +53,7 @@ import org.apache.hadoop.ipc.RemoteException;
@InterfaceAudience.Private
class NettyRpcDuplexHandler extends ChannelDuplexHandler {
- private static final Log LOG = LogFactory.getLog(NettyRpcDuplexHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NettyRpcDuplexHandler.class);
private final NettyRpcConnection conn;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java
index bdedcf8bcb..582067f744 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java
@@ -25,11 +25,11 @@ import java.io.IOException;
import java.net.UnknownHostException;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -52,7 +52,7 @@ import org.apache.hadoop.security.token.TokenSelector;
@InterfaceAudience.Private
abstract class RpcConnection {
- private static final Log LOG = LogFactory.getLog(RpcConnection.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RpcConnection.class);
protected final ConnectionId remoteId;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java
index 16fe27df52..e944ec2cd2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java
@@ -19,12 +19,12 @@ package org.apache.hadoop.hbase.ipc;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.ReflectionUtils;
/**
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
*/
@InterfaceAudience.Private
public class RpcControllerFactory {
- private static final Log LOG = LogFactory.getLog(RpcControllerFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RpcControllerFactory.class);
/**
* Custom RPC Controller factory allows frameworks to change the RPC controller. If the configured
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
index 5d1634a392..1e79186fb8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
@@ -25,11 +25,11 @@ import java.util.LinkedList;
import java.util.Objects;
import java.util.Queue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
@@ -44,7 +44,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Public
public class QuotaRetriever implements Closeable, Iterable {
- private static final Log LOG = LogFactory.getLog(QuotaRetriever.class);
+ private static final Logger LOG = LoggerFactory.getLogger(QuotaRetriever.class);
private final Queue cache = new LinkedList<>();
private ResultScanner scanner;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java
index e5573e4d28..9e5f092ed6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java
@@ -28,8 +28,6 @@ import java.util.Map;
import java.util.Objects;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CompareOperator;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
@@ -89,7 +89,7 @@ import org.apache.hadoop.hbase.util.Strings;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class QuotaTableUtil {
- private static final Log LOG = LogFactory.getLog(QuotaTableUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(QuotaTableUtil.class);
/** System table for quotas */
public static final TableName QUOTA_TABLE_NAME =
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java
index 290778ad6c..b65b30817f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java
@@ -33,9 +33,9 @@ import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -47,7 +47,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
@InterfaceAudience.Private
public abstract class AbstractHBaseSaslRpcClient {
- private static final Log LOG = LogFactory.getLog(AbstractHBaseSaslRpcClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractHBaseSaslRpcClient.class);
private static final byte[] EMPTY_TOKEN = new byte[0];
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
index 8a40fc12e2..7bba8b8466 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
@@ -28,13 +28,13 @@ import java.util.Properties;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.crypto.cipher.CryptoCipherFactory;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Encryption;
@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class EncryptionUtil {
- static private final Log LOG = LogFactory.getLog(EncryptionUtil.class);
+ static private final Logger LOG = LoggerFactory.getLogger(EncryptionUtil.class);
static private final SecureRandom RNG = new SecureRandom();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
index 5d502f1815..37d3cddfaa 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
@@ -32,10 +32,10 @@ import java.nio.ByteBuffer;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
import org.apache.hadoop.io.WritableUtils;
@@ -52,7 +52,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
@InterfaceAudience.Private
public class HBaseSaslRpcClient extends AbstractHBaseSaslRpcClient {
- private static final Log LOG = LogFactory.getLog(HBaseSaslRpcClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseSaslRpcClient.class);
private boolean cryptoAesEnable;
private CryptoAES cryptoAES;
private InputStream saslInputStream;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java
index 8b3fc5b438..55f487dc15 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java
@@ -24,9 +24,9 @@ import java.io.IOException;
import javax.security.sasl.Sasl;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -36,7 +36,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
*/
@InterfaceAudience.Private
public class NettyHBaseSaslRpcClient extends AbstractHBaseSaslRpcClient {
- private static final Log LOG = LogFactory.getLog(NettyHBaseSaslRpcClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NettyHBaseSaslRpcClient.class);
public NettyHBaseSaslRpcClient(AuthMethod method, Token<? extends TokenIdentifier> token,
String serverPrincipal, boolean fallbackAllowed, String rpcProtection) throws IOException {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java
index af081957c5..db74726300 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java
@@ -25,10 +25,10 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.FallbackDisallowedException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
@@ -41,7 +41,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
@InterfaceAudience.Private
public class NettyHBaseSaslRpcClientHandler extends SimpleChannelInboundHandler {
- private static final Log LOG = LogFactory.getLog(NettyHBaseSaslRpcClientHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NettyHBaseSaslRpcClientHandler.class);
private final Promise saslPromise;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index b30715a1e2..d37abdf72a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -28,13 +28,13 @@ import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;
import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class SaslUtil {
- private static final Log LOG = LogFactory.getLog(SaslUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SaslUtil.class);
public static final String SASL_DEFAULT_REALM = "default";
public static final int SWITCH_TO_SIMPLE_AUTH = -88;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
index 7ff311ef56..9fa6458be5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
@@ -24,9 +24,9 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.VersionedWritable;
@@ -54,7 +54,7 @@ public class Permission extends VersionedWritable {
public byte code() { return code; }
}
- private static final Log LOG = LogFactory.getLog(Permission.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Permission.class);
protected static final Map ACTION_BY_CODE = Maps.newHashMap();
protected Action[] actions;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
index 94e9c0e882..72bd69f497 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
@@ -22,10 +22,10 @@ import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@InterfaceAudience.Private
public class UserPermission extends TablePermission {
- private static final Log LOG = LogFactory.getLog(UserPermission.class);
+ private static final Logger LOG = LoggerFactory.getLogger(UserPermission.class);
private byte[] user;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java
index 87826dff0a..39959ef61d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.security.token;
import java.util.Collection;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -31,7 +31,7 @@ import org.apache.hadoop.security.token.TokenSelector;
@InterfaceAudience.Private
public class AuthenticationTokenSelector
implements TokenSelector {
- private static final Log LOG = LogFactory.getLog(AuthenticationTokenSelector.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AuthenticationTokenSelector.class);
public AuthenticationTokenSelector() {
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
index 305ec4d6e4..daa906e897 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
@@ -23,8 +23,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
@@ -68,7 +67,7 @@ import edu.umd.cs.findbugs.annotations.Nullable;
*/
@InterfaceAudience.Private
public final class ResponseConverter {
- private static final Log LOG = LogFactory.getLog(ResponseConverter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ResponseConverter.class);
private ResponseConverter() {
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java
index 965a243937..b5d511cb4b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java
@@ -30,15 +30,14 @@ import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.Code;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -47,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public final class ReadOnlyZKClient implements Closeable {
- private static final Log LOG = LogFactory.getLog(ReadOnlyZKClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReadOnlyZKClient.class);
public static final String RECOVERY_RETRY = "zookeeper.recovery.retry";
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
index c8a39574de..bad78264ac 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
@@ -26,8 +26,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -38,6 +36,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test setting values in the descriptor
@@ -45,7 +45,7 @@ import org.junit.rules.TestName;
@Category({MiscTests.class, SmallTests.class})
@Deprecated
public class TestHTableDescriptor {
- private static final Log LOG = LogFactory.getLog(TestHTableDescriptor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHTableDescriptor.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
index 7b5aa5cb75..37df4decf9 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
@@ -25,10 +25,9 @@ import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
+import java.util.Objects;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -43,6 +42,8 @@ import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test cases for ensuring our client visible classes have annotations for
@@ -72,7 +73,7 @@ import org.junit.experimental.categories.Category;
public class TestInterfaceAudienceAnnotations {
private static final String HBASE_PROTOBUF = "org.apache.hadoop.hbase.protobuf.generated";
- private static final Log LOG = LogFactory.getLog(TestInterfaceAudienceAnnotations.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestInterfaceAudienceAnnotations.class);
/** Selects classes with generated in their package name */
static class GeneratedClassFilter implements ClassFinder.ClassFilter {
@@ -315,7 +316,7 @@ public class TestInterfaceAudienceAnnotations {
if (!classes.isEmpty()) {
LOG.info("These are the classes that DO NOT have @InterfaceAudience annotation:");
for (Class<?> clazz : classes) {
- LOG.info(clazz);
+ LOG.info(Objects.toString(clazz));
}
}
@@ -358,7 +359,7 @@ public class TestInterfaceAudienceAnnotations {
LOG.info("These are the @InterfaceAudience.Public classes that have @InterfaceStability " +
"annotation:");
for (Class<?> clazz : classes) {
- LOG.info(clazz);
+ LOG.info(Objects.toString(clazz));
}
}
@@ -403,7 +404,7 @@ public class TestInterfaceAudienceAnnotations {
LOG.info("These are the @InterfaceAudience.LimitedPrivate classes that DO NOT " +
"have @InterfaceStability annotation:");
for (Class<?> clazz : classes) {
- LOG.info(clazz);
+ LOG.info(Objects.toString(clazz));
}
}
Assert.assertEquals("All classes that are marked with @InterfaceAudience.LimitedPrivate " +
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
index f487568edf..dd2ac6ff6e 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
@@ -50,8 +50,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CallQueueTooBigException;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -80,12 +78,14 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ClientTests.class, MediumTests.class})
public class TestAsyncProcess {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestAsyncProcess.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestAsyncProcess.class);
private static final TableName DUMMY_TABLE =
TableName.valueOf("DUMMY_TABLE");
private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8);
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
index 9f3297626a..5a311d1f7f 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
@@ -25,6 +25,7 @@ import java.net.SocketTimeoutException;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import java.util.Random;
import java.util.SortedMap;
import java.util.concurrent.CompletableFuture;
@@ -35,8 +36,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.lang3.NotImplementedException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.CellComparatorImpl;
@@ -65,7 +64,8 @@ import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
@@ -102,7 +102,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpeci
*/
@Category({ClientTests.class, SmallTests.class})
public class TestClientNoCluster extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(TestClientNoCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestClientNoCluster.class);
private Configuration conf;
public static final ServerName META_SERVERNAME =
ServerName.valueOf("meta.example.org", 16010, 12345);
@@ -234,7 +234,7 @@ public class TestClientNoCluster extends Configured implements Tool {
try {
Result result = null;
while ((result = scanner.next()) != null) {
- LOG.info(result);
+ LOG.info(Objects.toString(result));
}
} finally {
scanner.close();
@@ -256,7 +256,7 @@ public class TestClientNoCluster extends Configured implements Tool {
try {
Result result = null;
while ((result = scanner.next()) != null) {
- LOG.info(result);
+ LOG.info(Objects.toString(result));
}
} finally {
scanner.close();
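Note on the LOG.info(clazz) and LOG.info(result) changes above: the SLF4J Logger methods take a String message rather than an arbitrary Object, hence the Objects.toString(...) wrapping. A parameterized message is an equally idiomatic option; a small sketch (names are illustrative only):

    import java.util.Objects;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class MessageConversionSketch {
      private static final Logger LOG = LoggerFactory.getLogger(MessageConversionSketch.class);

      static void logValue(Object value) {
        // Form used in the patch: convert the Object explicitly.
        LOG.info(Objects.toString(value));
        // Parameterized form: value.toString() runs only if INFO is enabled.
        LOG.info("{}", value);
      }
    }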
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestInterfaceAlign.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestInterfaceAlign.java
index ced1eb8885..6318bc496b 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestInterfaceAlign.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestInterfaceAlign.java
@@ -28,18 +28,18 @@ import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ ClientTests.class, SmallTests.class })
public class TestInterfaceAlign {
- private static final Log LOG = LogFactory.getLog(TestInterfaceAlign.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestInterfaceAlign.class);
/**
* Test methods name match up
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
index 6c9aadd099..95369c73b4 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@@ -39,7 +37,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
/**
@@ -48,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
@Category({SmallTests.class, ClientTests.class})
public class TestSnapshotFromAdmin {
- private static final Log LOG = LogFactory.getLog(TestSnapshotFromAdmin.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotFromAdmin.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java
index 3c159b2321..639d97401a 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java
@@ -26,8 +26,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -37,13 +35,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test setting values in the descriptor
*/
@Category({MiscTests.class, SmallTests.class})
public class TestTableDescriptorBuilder {
- private static final Log LOG = LogFactory.getLog(TestTableDescriptorBuilder.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableDescriptorBuilder.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java
index 6fdb864c9f..1ae2c4a9a1 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java
@@ -24,15 +24,12 @@ import java.nio.ByteBuffer;
import java.util.Arrays;
import org.apache.commons.lang3.time.StopWatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.KeyValueCodec;
import org.apache.hadoop.hbase.io.SizedCellScanner;
@@ -44,15 +41,16 @@ import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.GzipCodec;
-import org.apache.log4j.Level;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ ClientTests.class, SmallTests.class })
public class TestCellBlockBuilder {
- private static final Log LOG = LogFactory.getLog(TestCellBlockBuilder.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCellBlockBuilder.class);
private CellBlockBuilder builder;
@@ -190,7 +188,6 @@ public class TestCellBlockBuilder {
}
}
CellBlockBuilder builder = new CellBlockBuilder(HBaseConfiguration.create());
- ((Log4JLogger) CellBlockBuilder.LOG).getLogger().setLevel(Level.ALL);
timerTests(builder, count, size, new KeyValueCodec(), null);
timerTests(builder, count, size, new KeyValueCodec(), new DefaultCodec());
timerTests(builder, count, size, new KeyValueCodec(), new GzipCodec());
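The deleted cast above depended on commons-logging's Log4JLogger wrapper; the SLF4J Logger interface has no setLevel(). If the test still needed to raise verbosity, one option, assuming a Log4j 1.x binding (slf4j-log4j12) stays on the classpath, would be to adjust the backend logger directly, roughly:

    import org.apache.log4j.Level;
    import org.apache.log4j.LogManager;

    class CellBlockBuilderLogLevelSketch {
      static void enableAllLogging() {
        // Hypothetical helper: bump the underlying Log4j 1.x logger to ALL.
        LogManager.getLogger("org.apache.hadoop.hbase.ipc.CellBlockBuilder").setLevel(Level.ALL);
      }
    }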
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index cf302eba5a..e660225299 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -201,8 +201,8 @@
hbase-shaded-miscellaneous
- commons-logging
- commons-logging
+ org.slf4j
+ slf4j-api
commons-codec
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java
index fcfdee8455..5880b8c33b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java
@@ -21,14 +21,14 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import java.net.UnknownHostException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.DNS;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility methods for helping with security tasks. Downstream users
@@ -68,7 +68,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Public
public class AuthUtil {
- private static final Log LOG = LogFactory.getLog(AuthUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AuthUtil.class);
/** Prefix character to denote group names */
private static final String GROUP_PREFIX = "@";
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java
index 2dd1bdb79a..771fdaa7d6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java
@@ -18,13 +18,13 @@
package org.apache.hadoop.hbase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class CellComparatorImpl implements CellComparator {
- static final Log LOG = LogFactory.getLog(CellComparatorImpl.class);
+ static final Logger LOG = LoggerFactory.getLogger(CellComparatorImpl.class);
/**
* Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion
* of KeyValue only.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
index ff437db5fc..5cf37aaab0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
@@ -27,10 +27,10 @@ import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore.ChoreServicer;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
@@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Public
public class ChoreService implements ChoreServicer {
- private static final Log LOG = LogFactory.getLog(ChoreService.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ChoreService.class);
/**
* The minimum number of threads in the core pool of the underlying ScheduledThreadPoolExecutor
@@ -113,7 +113,7 @@ public class ChoreService implements ChoreServicer {
/**
* @param coreThreadPoolPrefix Prefix that will be applied to the Thread name of all threads
* spawned by this service
- * @param corePoolSize The initial size to set the core pool of the ScheduledThreadPoolExecutor
+ * @param corePoolSize The initial size to set the core pool of the ScheduledThreadPoolExecutor
* to during initialization. The default size is 1, but specifying a larger size may be
* beneficial if you know that 1 thread will not be enough.
* @param jitter Should chore service add some jitter for all of the scheduled chores. When set
@@ -331,7 +331,7 @@ public class ChoreService implements ChoreServicer {
scheduledChores.clear();
choresMissingStartTime.clear();
}
-
+
/**
* @return true when the service is shutdown and thus cannot be used anymore
*/
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
index 6012fe85f8..d21ee9389f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
@@ -22,19 +22,19 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Adds HBase configuration files to a Configuration
*/
@InterfaceAudience.Public
public class HBaseConfiguration extends Configuration {
- private static final Log LOG = LogFactory.getLog(HBaseConfiguration.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseConfiguration.class);
/**
* Instantiating HBaseConfiguration() is deprecated. Please use
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 88e7d88c4d..6a79c88062 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -35,15 +35,16 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.io.RawComparator;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+
/**
* An HBase Key/Value. This is the fundamental HBase Type.
*
@@ -81,7 +82,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
public class KeyValue implements ExtendedCell {
private static final ArrayList EMPTY_ARRAY_LIST = new ArrayList<>();
- private static final Log LOG = LogFactory.getLog(KeyValue.class);
+ private static final Logger LOG = LoggerFactory.getLogger(KeyValue.class);
public static final int FIXED_OVERHEAD = ClassSize.OBJECT + // the KeyValue object itself
ClassSize.REFERENCE + // pointer to "bytes"
@@ -738,9 +739,9 @@ public class KeyValue implements ExtendedCell {
}
public KeyValue(Cell c) {
- this(c.getRowArray(), c.getRowOffset(), (int)c.getRowLength(),
- c.getFamilyArray(), c.getFamilyOffset(), (int)c.getFamilyLength(),
- c.getQualifierArray(), c.getQualifierOffset(), (int) c.getQualifierLength(),
+ this(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
+ c.getFamilyArray(), c.getFamilyOffset(), c.getFamilyLength(),
+ c.getQualifierArray(), c.getQualifierOffset(), c.getQualifierLength(),
c.getTimestamp(), Type.codeToType(c.getTypeByte()), c.getValueArray(), c.getValueOffset(),
c.getValueLength(), c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
this.seqId = c.getSequenceId();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
index 927b2b3736..16842da307 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
@@ -21,10 +21,9 @@ package org.apache.hadoop.hbase;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Public
public abstract class ScheduledChore implements Runnable {
- private static final Log LOG = LogFactory.getLog(ScheduledChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ScheduledChore.class);
private final String name;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
index b8816ad017..e1a96bdcf4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
@@ -23,18 +23,19 @@ import java.io.InputStream;
import java.io.PushbackInputStream;
import edu.umd.cs.findbugs.annotations.NonNull;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* TODO javadoc
*/
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
public abstract class BaseDecoder implements Codec.Decoder {
- protected static final Log LOG = LogFactory.getLog(BaseDecoder.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(BaseDecoder.class);
protected final InputStream in;
private Cell current = null;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
index 61cc170dce..0b97abbb7d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
@@ -23,10 +23,10 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An OutputStream which writes data into ByteBuffers. It will try to get ByteBuffer, as and when
@@ -37,7 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class ByteBufferListOutputStream extends ByteBufferOutputStream {
- private static final Log LOG = LogFactory.getLog(ByteBufferListOutputStream.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ByteBufferListOutputStream.class);
private ByteBufferPool pool;
// Keep track of the BBs where bytes written to. We will first try to get a BB from the pool. If
@@ -115,7 +115,7 @@ public class ByteBufferListOutputStream extends ByteBufferOutputStream {
try {
close();
} catch (IOException e) {
- LOG.debug(e);
+ LOG.debug(e.toString(), e);
}
// Return back all the BBs to pool
if (this.bufsFromPool != null) {
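Note on the hunk above: commons-logging accepted a bare Throwable in debug(Object), but SLF4J's Logger has no debug(Throwable) overload, so such call sites are rewritten to pass a message plus the exception. A minimal sketch of the resulting pattern (the class and method here are illustrative, not part of the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical helper; only the logging call mirrors the pattern introduced above.
class CloseQuietlyExample {
  private static final Logger LOG = LoggerFactory.getLogger(CloseQuietlyExample.class);

  void closeQuietly(java.io.Closeable c) {
    try {
      c.close();
    } catch (java.io.IOException e) {
      // SLF4J needs an explicit message; passing the Throwable keeps the stack trace.
      LOG.debug(e.toString(), e);
    }
  }
}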
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
index 784c88fcf6..e699ea959c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
@@ -22,9 +22,9 @@ import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class ByteBufferPool {
- private static final Log LOG = LogFactory.getLog(ByteBufferPool.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ByteBufferPool.class);
// TODO better config names?
// hbase.ipc.server.reservoir.initial.max -> hbase.ipc.server.reservoir.max.buffer.count
// hbase.ipc.server.reservoir.initial.buffer.size -> hbase.ipc.server.reservoir.buffer.size
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
index ec4ce38d3a..d258ba2927 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
@@ -23,8 +23,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
@@ -39,6 +37,8 @@ import org.apache.hadoop.io.compress.DoNotPool;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Compression related stuff.
@@ -46,7 +46,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public final class Compression {
- private static final Log LOG = LogFactory.getLog(Compression.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Compression.class);
/**
* Prevent the instantiation of class.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java
index 3db9d7ec44..05ea39b72a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java
@@ -22,14 +22,14 @@ import java.io.OutputStream;
import java.util.Arrays;
import java.util.zip.GZIPOutputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.util.JVM;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.CompressorStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Fixes an inefficiency in Hadoop's Gzip codec, allowing to reuse compression
@@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class ReusableStreamGzipCodec extends GzipCodec {
- private static final Log LOG = LogFactory.getLog(Compression.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Compression.class);
/**
* A bridge that wraps around a DeflaterOutputStream to make it a
@@ -70,7 +70,7 @@ public class ReusableStreamGzipCodec extends GzipCodec {
try {
gzipStream.close();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
}
@@ -98,7 +98,7 @@ public class ReusableStreamGzipCodec extends GzipCodec {
*/
@Override
public void finish() throws IOException {
- if (HAS_BROKEN_FINISH) {
+ if (HAS_BROKEN_FINISH) {
if (!def.finished()) {
def.finish();
while (!def.finished()) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
index 49cc61f231..af0089d02c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
@@ -27,13 +27,12 @@ import java.security.spec.InvalidKeySpecException;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
+
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@@ -41,6 +40,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A facade for encryption algorithms and related support.
@@ -48,7 +49,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Public
public final class Encryption {
- private static final Log LOG = LogFactory.getLog(Encryption.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Encryption.class);
/**
* Crypto context
@@ -204,7 +205,7 @@ public final class Encryption {
/**
* Return a 128 bit key derived from the concatenation of the supplied
* arguments using PBKDF2WithHmacSHA1 at 10,000 iterations.
- *
+ *
*/
public static byte[] pbkdf128(String... args) {
byte[] salt = new byte[128];
@@ -227,7 +228,7 @@ public final class Encryption {
/**
* Return a 128 bit key derived from the concatenation of the supplied
* arguments using PBKDF2WithHmacSHA1 at 10,000 iterations.
- *
+ *
*/
public static byte[] pbkdf128(byte[]... args) {
byte[] salt = new byte[128];
@@ -420,7 +421,7 @@ public final class Encryption {
*/
public static Key getSecretKeyForSubject(String subject, Configuration conf)
throws IOException {
- KeyProvider provider = (KeyProvider)getKeyProvider(conf);
+ KeyProvider provider = getKeyProvider(conf);
if (provider != null) try {
Key[] keys = provider.getKeys(new String[] { subject });
if (keys != null && keys.length > 0) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java
index 6c73bb4970..93a60241ba 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java
@@ -23,10 +23,9 @@ import java.io.OutputStream;
import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.SecureRandom;
+
import javax.crypto.spec.SecretKeySpec;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.CipherProvider;
import org.apache.hadoop.hbase.io.crypto.Context;
@@ -34,6 +33,8 @@ import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryptor;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@@ -48,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceStability.Evolving
public class AES extends Cipher {
- private static final Log LOG = LogFactory.getLog(AES.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AES.class);
public static final String CIPHER_MODE_KEY = "hbase.crypto.algorithm.aes.mode";
public static final String CIPHER_PROVIDER_KEY = "hbase.crypto.algorithm.aes.provider";
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java
index 412ea7947d..57f3d3e5e4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java
@@ -24,11 +24,10 @@ import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.SecureRandom;
import java.util.Properties;
+
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.crypto.cipher.CryptoCipherFactory;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.CipherProvider;
@@ -37,6 +36,8 @@ import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryptor;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@@ -45,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceStability.Evolving
public class CommonsCryptoAES extends Cipher {
- private static final Log LOG = LogFactory.getLog(CommonsCryptoAES.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CommonsCryptoAES.class);
public static final String CIPHER_MODE_KEY = "hbase.crypto.commons.mode";
public static final String CIPHER_CLASSES_KEY = "hbase.crypto.commons.cipher.classes";
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java
index 92a3a4f089..7dbbdba980 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java
@@ -13,16 +13,16 @@ package org.apache.hadoop.hbase.io.encoding;
import java.io.DataOutputStream;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class RowIndexEncoderV1 {
- private static final Log LOG = LogFactory.getLog(RowIndexEncoderV1.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RowIndexEncoderV1.class);
/** The Cell previously appended. */
private Cell lastCell = null;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/log/HBaseMarkers.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/log/HBaseMarkers.java
new file mode 100644
index 0000000000..ada8ad0ee8
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/log/HBaseMarkers.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.log;
+
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Marker;
+import org.slf4j.MarkerFactory;
+
+@InterfaceAudience.Private
+public class HBaseMarkers {
+ public static final Marker FATAL = MarkerFactory.getMarker("FATAL");
+
+ private HBaseMarkers() {
+ }
+}
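Background for the new file above: SLF4J has no FATAL level, so HBaseMarkers.FATAL lets former LOG.fatal(...) call sites keep a FATAL tag while logging at ERROR. A minimal usage sketch (the surrounding class is hypothetical; the marker call matches the pattern used in later hunks of this patch):

import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical caller showing how a former LOG.fatal(...) is expressed with a marker.
class MarkerUsageExample {
  private static final Logger LOG = LoggerFactory.getLogger(MarkerUsageExample.class);

  void failHard(Throwable cause) {
    // error(Marker, String, Throwable) records the event at ERROR with a FATAL marker
    // that appenders and filters can still match on.
    LOG.error(HBaseMarkers.FATAL, "Unrecoverable failure, aborting", cause);
  }
}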
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
index dceafbd087..c52c764c1e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
@@ -23,11 +23,11 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Keeps lists of superusers and super groups loaded from HBase configuration,
@@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public final class Superusers {
- private static final Log LOG = LogFactory.getLog(Superusers.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Superusers.class);
/** Configuration key for superusers */
public static final String SUPERUSER_CONF_KEY = "hbase.superuser"; // Not getting a name
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
index b31a4f6d73..03d03d9fe4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
@@ -18,15 +18,15 @@
package org.apache.hadoop.hbase.trace;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.htrace.core.HTraceConfiguration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class HBaseHTraceConfiguration extends HTraceConfiguration {
- private static final Log LOG = LogFactory.getLog(HBaseHTraceConfiguration.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseHTraceConfiguration.class);
public static final String KEY_PREFIX = "hbase.htrace.";
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
index 89339c536f..14ef945d75 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
@@ -21,11 +21,11 @@ import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.htrace.core.SpanReceiver;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class provides functions for reading the names of SpanReceivers from
@@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class SpanReceiverHost {
public static final String SPAN_RECEIVERS_CONF_KEY = "hbase.trace.spanreceiver.classes";
- private static final Log LOG = LogFactory.getLog(SpanReceiverHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SpanReceiverHost.class);
private Collection<SpanReceiver> receivers;
private Configuration conf;
private boolean closed = false;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
index 21b174e71b..9414e31532 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
@@ -31,14 +31,14 @@ import org.apache.commons.cli.MissingOptionException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Common base class used for HBase command-line tools. Simplifies workflow and
@@ -55,7 +55,7 @@ public abstract class AbstractHBaseTool implements Tool, Configurable {
private static final Option HELP_OPTION = new Option("h", "help", false,
"Prints help for this tool.");
- private static final Log LOG = LogFactory.getLog(AbstractHBaseTool.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractHBaseTool.class);
protected final Options options = new Options();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
index 91df2d56ac..fe74bcf91c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
@@ -39,9 +39,10 @@ import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Encodes and decodes to and from Base64 notation.
@@ -158,7 +159,7 @@ public class Base64 {
/* ******** P R I V A T E F I E L D S ******** */
- private static final Log LOG = LogFactory.getLog(Base64.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Base64.class);
/** Maximum line length (76) of Base64 output. */
private final static int MAX_LINE_LENGTH = 76;
@@ -1100,7 +1101,7 @@ public class Base64 {
// Check the size of file
if (file.length() > Integer.MAX_VALUE) {
- LOG.fatal("File is too big for this convenience method (" +
+ LOG.error(HBaseMarkers.FATAL, "File is too big for this convenience method (" +
file.length() + " bytes).");
return null;
} // end if: file too big for int index
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
index b2e5c9b751..6782de6bd0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
@@ -28,14 +28,15 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.nio.MultiByteBuff;
import org.apache.hadoop.hbase.nio.SingleByteBuff;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
/**
* This class manages an array of ByteBuffers with a default size 4MB. These
@@ -44,7 +45,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class ByteBufferArray {
- private static final Log LOG = LogFactory.getLog(ByteBufferArray.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ByteBufferArray.class);
public static final int DEFAULT_BUFFER_SIZE = 4 * 1024 * 1024;
@VisibleForTesting
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index c32649b067..159a7905b9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -36,9 +36,6 @@ import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
-import com.google.protobuf.ByteString;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
@@ -46,11 +43,16 @@ import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;
import org.apache.yetus.audience.InterfaceAudience;
-import sun.misc.Unsafe;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import com.google.protobuf.ByteString;
+
+import sun.misc.Unsafe;
+
/**
* Utility class that handles byte arrays, conversions to/from other types,
* comparisons, hash code generation, manufacturing keys for HashMaps or
@@ -70,7 +72,7 @@ public class Bytes implements Comparable {
//HConstants.EMPTY_BYTE_ARRAY should be updated if this changed
private static final byte [] EMPTY_BYTE_ARRAY = new byte [0];
- private static final Log LOG = LogFactory.getLog(Bytes.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Bytes.class);
/**
* Size of boolean in bytes
@@ -1533,8 +1535,8 @@ public class Bytes implements Comparable {
final int stride = 8;
final int minLength = Math.min(length1, length2);
int strideLimit = minLength & ~(stride - 1);
- final long offset1Adj = (long) offset1 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
- final long offset2Adj = (long) offset2 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
+ final long offset1Adj = offset1 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
+ final long offset2Adj = offset2 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
int i;
/*
@@ -1542,8 +1544,8 @@ public class Bytes implements Comparable {
* than 4 bytes even on 32-bit. On the other hand, it is substantially faster on 64-bit.
*/
for (i = 0; i < strideLimit; i += stride) {
- long lw = theUnsafe.getLong(buffer1, offset1Adj + (long) i);
- long rw = theUnsafe.getLong(buffer2, offset2Adj + (long) i);
+ long lw = theUnsafe.getLong(buffer1, offset1Adj + i);
+ long rw = theUnsafe.getLong(buffer2, offset2Adj + i);
if (lw != rw) {
if(!UnsafeAccess.littleEndian) {
return ((lw + Long.MIN_VALUE) < (rw + Long.MIN_VALUE)) ? -1 : 1;
@@ -1936,7 +1938,7 @@ public class Bytes implements Comparable {
public static int hashCode(byte[] bytes, int offset, int length) {
int hash = 1;
for (int i = offset; i < offset + length; i++)
- hash = (31 * hash) + (int) bytes[i];
+ hash = (31 * hash) + bytes[i];
return hash;
}
@@ -2517,7 +2519,7 @@ public class Bytes implements Comparable {
}
return new String(ch);
}
-
+
/**
* Convert a byte array into a hex string
*/
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index efcf8d0bec..9a7f0ef70e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -25,10 +25,9 @@ import java.lang.reflect.Modifier;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
@@ -40,7 +39,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class ClassSize {
- private static final Log LOG = LogFactory.getLog(ClassSize.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClassSize.class);
/** Array overhead */
public static final int ARRAY;
@@ -197,7 +196,7 @@ public class ClassSize {
return (int) UnsafeAccess.theUnsafe.objectFieldOffset(
HeaderSize.class.getDeclaredField("a"));
} catch (NoSuchFieldException | SecurityException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
return super.headerSize();
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
index 0e0372c94e..5a6625bd4d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
@@ -29,8 +29,6 @@ import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -45,6 +43,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
@InterfaceAudience.Private
public abstract class CommonFSUtils {
- private static final Log LOG = LogFactory.getLog(CommonFSUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CommonFSUtils.class);
/** Parameter name for HBase WAL directory */
public static final String HBASE_WAL_DIR = "hbase.wal.dir";
@@ -733,7 +733,7 @@ public abstract class CommonFSUtils {
* @param LOG log to output information
* @throws IOException if an unexpected exception occurs
*/
- public static void logFileSystemState(final FileSystem fs, final Path root, Log LOG)
+ public static void logFileSystemState(final FileSystem fs, final Path root, Logger LOG)
throws IOException {
LOG.debug("File system contents for path " + root);
logFSTree(LOG, fs, root, "|-");
@@ -742,9 +742,9 @@ public abstract class CommonFSUtils {
/**
* Recursive helper to log the state of the FS
*
- * @see #logFileSystemState(FileSystem, Path, Log)
+ * @see #logFileSystemState(FileSystem, Path, Logger)
*/
- private static void logFSTree(Log LOG, final FileSystem fs, final Path root, String prefix)
+ private static void logFSTree(Logger LOG, final FileSystem fs, final Path root, String prefix)
throws IOException {
FileStatus[] files = listStatus(fs, root, null);
if (files == null) {
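With the signature change above, callers of CommonFSUtils.logFileSystemState supply an org.slf4j.Logger instead of a commons-logging Log. A brief sketch of a caller under that assumption (the path and logger name are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class FsStateDumpExample {
  private static final Logger LOG = LoggerFactory.getLogger(FsStateDumpExample.class);

  // Logs the directory tree under the given root at DEBUG level via the SLF4J logger.
  void dump(Configuration conf) throws java.io.IOException {
    FileSystem fs = FileSystem.get(conf);
    CommonFSUtils.logFileSystemState(fs, new Path("/hbase"), LOG);
  }
}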
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
index 15828ed8ee..283d59a17e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
@@ -34,8 +34,6 @@ import java.util.jar.JarFile;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -43,6 +41,8 @@ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
@@ -76,7 +76,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
*/
@InterfaceAudience.Private
public class CoprocessorClassLoader extends ClassLoaderBase {
- private static final Log LOG = LogFactory.getLog(CoprocessorClassLoader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CoprocessorClassLoader.class);
// A temporary place ${hbase.local.dir}/jars/tmp/ to store the local
// copy of the jar file and the libraries contained in the jar.
@@ -108,7 +108,7 @@ public class CoprocessorClassLoader extends ClassLoaderBase {
"org.xml",
"sunw.",
// logging
- "org.apache.commons.logging",
+ "org.slf4j",
"org.apache.log4j",
"com.hadoop",
// HBase:
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
index 75dcf5f0d2..28fce21b1c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
@@ -23,13 +23,13 @@ import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This is a class loader that can load classes dynamically from new
@@ -57,8 +57,8 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class DynamicClassLoader extends ClassLoaderBase {
- private static final Log LOG =
- LogFactory.getLog(DynamicClassLoader.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(DynamicClassLoader.class);
// Dynamic jars are put under ${hbase.local.dir}/jars/
private static final String DYNAMIC_JARS_DIR = File.separator
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
index 532f8419b4..ab95b31263 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
@@ -25,6 +25,7 @@ import java.lang.reflect.Array;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.Set;
+
import javax.management.AttributeNotFoundException;
import javax.management.InstanceNotFoundException;
import javax.management.IntrospectionException;
@@ -41,17 +42,18 @@ import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.TabularData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonGenerator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
/**
* Utility for doing JSON and MBeans.
*/
public class JSONBean {
- private static final Log LOG = LogFactory.getLog(JSONBean.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JSONBean.class);
private final JsonFactory jsonFactory;
public JSONBean() {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
index ae967a16e4..b6c05b67c9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
@@ -28,6 +28,7 @@ import java.lang.management.RuntimeMXBean;
import java.util.Hashtable;
import java.util.List;
import java.util.Set;
+
import javax.management.InstanceNotFoundException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanInfo;
@@ -37,15 +38,16 @@ import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.management.openmbean.CompositeData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
public final class JSONMetricUtil {
- private static final Log LOG = LogFactory.getLog(JSONMetricUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JSONMetricUtil.class);
private static MBeanServer mbServer = ManagementFactory.getPlatformMBeanServer();
//MBeans ObjectName domain names
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
index ab966f17b4..6657481ed0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
@@ -28,9 +28,9 @@ import java.lang.management.RuntimeMXBean;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -42,7 +42,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class JVM {
- private static final Log LOG = LogFactory.getLog(JVM.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JVM.class);
private OperatingSystemMXBean osMbean;
private static final boolean ibmvendor =
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java
index 804c1cdf45..c38f1a9f8b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java
@@ -23,9 +23,9 @@ import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility class for MD5
@@ -33,7 +33,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Public
public class MD5Hash {
- private static final Log LOG = LogFactory.getLog(MD5Hash.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MD5Hash.class);
/**
* Given a byte array, returns in MD5 hash as a hex string.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java
index 296dc643d2..6e472a0714 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java
@@ -24,13 +24,14 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.UndeclaredThrowableException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class Methods {
- private static final Log LOG = LogFactory.getLog(Methods.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Methods.class);
public static <T> Object call(Class<T> clazz, T instance, String methodName,
Class[] types, Object[] args) throws Exception {
@@ -38,7 +39,7 @@ public class Methods {
Method m = clazz.getMethod(methodName, types);
return m.invoke(instance, args);
} catch (IllegalArgumentException arge) {
- LOG.fatal("Constructed invalid call. class="+clazz.getName()+
+ LOG.error(HBaseMarkers.FATAL, "Constructed invalid call. class="+clazz.getName()+
" method=" + methodName + " types=" + Classes.stringify(types), arge);
throw arge;
} catch (NoSuchMethodException nsme) {
@@ -59,8 +60,9 @@ public class Methods {
throw new IllegalArgumentException(
"Denied access calling "+clazz.getName()+"."+methodName+"()", iae);
} catch (SecurityException se) {
- LOG.fatal("SecurityException calling method. class="+clazz.getName()+
- " method=" + methodName + " types=" + Classes.stringify(types), se);
+ LOG.error(HBaseMarkers.FATAL, "SecurityException calling method. class="+
+ clazz.getName()+" method=" + methodName + " types=" +
+ Classes.stringify(types), se);
throw se;
}
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
index 4f4b775af0..147e9160f9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
@@ -22,16 +22,16 @@ package org.apache.hadoop.hbase.util;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.exceptions.HBaseException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class PrettyPrinter {
- private static final Log LOG = LogFactory.getLog(PrettyPrinter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PrettyPrinter.class);
private static final String INTERVAL_REGEX = "((\\d+)\\s*SECONDS?\\s*\\()?\\s*" +
"((\\d+)\\s*DAYS?)?\\s*((\\d+)\\s*HOURS?)?\\s*" +
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
index 6430d2eb12..a136846a92 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
@@ -29,9 +29,10 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.charset.Charset;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import org.apache.commons.logging.Log;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+
+import edu.umd.cs.findbugs.annotations.NonNull;
@InterfaceAudience.Private
public class ReflectionUtils {
@@ -111,7 +112,7 @@ public class ReflectionUtils {
* @param title a descriptive title for the call stacks
* @param minInterval the minimum time from the last
*/
- public static void logThreadInfo(Log log,
+ public static void logThreadInfo(Logger log,
String title,
long minInterval) {
boolean dumpStack = false;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java
index 116b6cce7d..ffcd9cab33 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.util;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class RetryCounter {
@@ -127,7 +127,7 @@ public class RetryCounter {
}
}
- private static final Log LOG = LogFactory.getLog(RetryCounter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RetryCounter.class);
private RetryConfig retryConfig;
private int attempts;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
index 326da2e69c..7d4d692e1a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
@@ -18,10 +18,10 @@
*/
package org.apache.hadoop.hbase.util;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Sleeper for current thread.
@@ -31,7 +31,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class Sleeper {
- private static final Log LOG = LogFactory.getLog(Sleeper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Sleeper.class);
private final int period;
private final Stoppable stopper;
private static final long MINIMAL_DELTA_FOR_LOGGING = 10000;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
index 4e2f09f611..5f64f6329d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
@@ -31,11 +31,11 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
*/
@InterfaceAudience.Private
public class Threads {
- private static final Log LOG = LogFactory.getLog(Threads.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Threads.class);
private static final AtomicInteger poolNumber = new AtomicInteger(1);
public static final UncaughtExceptionHandler LOGGING_EXCEPTION_HANDLER =
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
index 50fef6d823..feaa9e6624 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
@@ -23,10 +23,11 @@ import java.nio.ByteOrder;
import java.security.AccessController;
import java.security.PrivilegedAction;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import sun.misc.Unsafe;
import sun.nio.ch.DirectBuffer;
@@ -34,7 +35,7 @@ import sun.nio.ch.DirectBuffer;
@InterfaceStability.Evolving
public final class UnsafeAccess {
- private static final Log LOG = LogFactory.getLog(UnsafeAccess.class);
+ private static final Logger LOG = LoggerFactory.getLogger(UnsafeAccess.class);
static final Unsafe theUnsafe;
@@ -325,7 +326,7 @@ public final class UnsafeAccess {
destAddress = destAddress + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
destBase = dest.array();
}
- long srcAddress = (long) srcOffset + BYTE_ARRAY_BASE_OFFSET;
+ long srcAddress = srcOffset + BYTE_ARRAY_BASE_OFFSET;
unsafeCopy(src, srcAddress, destBase, destAddress, length);
}
@@ -359,7 +360,7 @@ public final class UnsafeAccess {
srcAddress = srcAddress + BYTE_ARRAY_BASE_OFFSET + src.arrayOffset();
srcBase = src.array();
}
- long destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET;
+ long destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET;
unsafeCopy(srcBase, srcAddress, dest, destAddress, length);
}
@@ -386,7 +387,7 @@ public final class UnsafeAccess {
if (dest.isDirect()) {
destAddress = destOffset + ((DirectBuffer) dest).address();
} else {
- destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
+ destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
destBase = dest.array();
}
unsafeCopy(srcBase, srcAddress, destBase, destAddress, length);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java
index 8fe70443ae..88dd524296 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java
@@ -22,15 +22,15 @@ import java.lang.reflect.Method;
import java.security.AccessController;
import java.security.PrivilegedAction;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class UnsafeAvailChecker {
private static final String CLASS_NAME = "sun.misc.Unsafe";
- private static final Log LOG = LogFactory.getLog(UnsafeAvailChecker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(UnsafeAvailChecker.class);
private static boolean avail = false;
private static boolean unaligned = false;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
index 07b9c5880a..9d9b563062 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
@@ -21,17 +21,17 @@ package org.apache.hadoop.hbase.util;
import java.io.PrintStream;
import java.io.PrintWriter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Version;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class finds the Version information for HBase.
*/
@InterfaceAudience.Public
public class VersionInfo {
- private static final Log LOG = LogFactory.getLog(VersionInfo.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(VersionInfo.class.getName());
// If between two dots there is not a number, we regard it as a very large number so it is
// higher than any numbers in the version.
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
index cc0e9691d7..9dccb291fb 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
@@ -33,8 +33,8 @@ import java.util.jar.JarInputStream;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A class that finds a set of classes that are locally accessible
@@ -42,7 +42,7 @@ import org.apache.commons.logging.LogFactory;
* imposed by name and class filters provided by the user.
*/
public class ClassFinder {
- private static final Log LOG = LogFactory.getLog(ClassFinder.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClassFinder.class);
private static String CLASS_EXT = ".class";
private ResourcePathFilter resourcePathFilter;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index d84e8ec5c4..693f9b2f74 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -25,13 +25,13 @@ import java.util.List;
import java.util.UUID;
import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Waiter.Predicate;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Common helpers for testing HBase that do not depend on specific server/etc. things.
@@ -39,7 +39,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Public
public class HBaseCommonTestingUtility {
- protected static final Log LOG = LogFactory.getLog(HBaseCommonTestingUtility.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(HBaseCommonTestingUtility.class);
/** Compression algorithms to use in parameterized JUnit 4 tests */
public static final List<Object[]> COMPRESSION_ALGORITHMS_PARAMETERIZED =
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
index c0b98362c5..b42db95cfc 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
@@ -22,8 +22,8 @@ package org.apache.hadoop.hbase;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility class to check the resources:
@@ -32,7 +32,7 @@ import org.apache.commons.logging.LogFactory;
* - check that they don't leak during the test
*/
public class ResourceChecker {
- private static final Log LOG = LogFactory.getLog(ResourceChecker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ResourceChecker.class);
private String tagLine;
enum Phase {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
index 7d85b97cad..a28bad865d 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.CountingChore;
import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.DoNothingChore;
import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.FailInitialChore;
@@ -35,10 +33,12 @@ import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.SlowChore;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(SmallTests.class)
public class TestChoreService {
- public static final Log log = LogFactory.getLog(TestChoreService.class);
+ public static final Logger log = LoggerFactory.getLogger(TestChoreService.class);
/**
* A few ScheduledChore samples that are useful for testing with ChoreService
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
index d37d731161..e9c3e604a5 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
@@ -41,8 +41,6 @@ import java.util.jar.Manifest;
import javax.tools.JavaCompiler;
import javax.tools.ToolProvider;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.AfterClass;
@@ -51,11 +49,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestClassFinder {
- private static final Log LOG = LogFactory.getLog(TestClassFinder.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestClassFinder.class);
@Rule public TestName name = new TestName();
private static final HBaseCommonTestingUtility testUtil = new HBaseCommonTestingUtility();
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
index abb6a2825a..309480ab54 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
@@ -28,21 +28,20 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.AfterClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
@Category({MiscTests.class, SmallTests.class})
public class TestHBaseConfiguration {
- private static final Log LOG = LogFactory.getLog(TestHBaseConfiguration.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHBaseConfiguration.class);
private static HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index c6b726527b..085f357d64 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -31,15 +31,16 @@ import java.util.List;
import java.util.Set;
import java.util.TreeSet;
-import junit.framework.TestCase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import junit.framework.TestCase;
public class TestKeyValue extends TestCase {
- private static final Log LOG = LogFactory.getLog(TestKeyValue.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestKeyValue.class);
public void testColumnCompare() throws Exception {
final byte [] a = Bytes.toBytes("aaa");
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java
index 6ee52cb5c9..70750056b1 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java
@@ -23,10 +23,10 @@ import static org.junit.Assert.fail;
import java.text.MessageFormat;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A class that provides a standard waitFor pattern
@@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public final class Waiter {
- private static final Log LOG = LogFactory.getLog(Waiter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Waiter.class);
/**
* System property name whose value is a scale factor to increase time out values dynamically used
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
index 07dd601d27..8ae20d5cd9 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
@@ -24,8 +24,6 @@ import java.io.ByteArrayOutputStream;
import java.security.Key;
import javax.crypto.spec.SecretKeySpec;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@@ -34,11 +32,13 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestEncryption {
- private static final Log LOG = LogFactory.getLog(TestEncryption.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestEncryption.class);
@Test
public void testSmallBlocks() throws Exception {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
index 790568e26a..2fe8a8f6d6 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
@@ -29,19 +29,19 @@ import java.security.MessageDigest;
import java.util.Properties;
import javax.crypto.spec.SecretKeySpec;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestKeyStoreKeyProvider {
- private static final Log LOG = LogFactory.getLog(TestKeyStoreKeyProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestKeyStoreKeyProvider.class);
static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
static final String ALIAS = "test";
static final String PASSWORD = "password";
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
index 17471bb64d..0705a36571 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
@@ -35,16 +35,16 @@ import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Some utilities to help class loader testing
*/
public class ClassLoaderTestHelper {
- private static final Log LOG = LogFactory.getLog(ClassLoaderTestHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClassLoaderTestHelper.class);
private static final int BUFFER_SIZE = 4096;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
index e46e89b76e..ad49e55371 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
@@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.util;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A generator of random keys and values for load testing. Keys are generated
@@ -32,7 +32,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class LoadTestKVGenerator {
- private static final Log LOG = LogFactory.getLog(LoadTestKVGenerator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LoadTestKVGenerator.class);
private static int logLimit = 10;
/** A random number generator for determining value size */
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java
index 7dd27d4804..a3330af134 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java
@@ -26,8 +26,6 @@ import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -38,13 +36,15 @@ import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test {@link CommonFSUtils}.
*/
@Category({MiscTests.class, MediumTests.class})
public class TestCommonFSUtils {
- private static final Log LOG = LogFactory.getLog(TestCommonFSUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCommonFSUtils.class);
private HBaseCommonTestingUtility htu;
private Configuration conf;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
index 5bdb668a2d..5b80cd8fea 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.fail;
import java.io.File;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -32,13 +30,15 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test TestDynamicClassLoader
*/
@Category({MiscTests.class, SmallTests.class})
public class TestDynamicClassLoader {
- private static final Log LOG = LogFactory.getLog(TestDynamicClassLoader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDynamicClassLoader.class);
private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
private Configuration conf;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java
index 2f16ee8f7f..6b6d9b072b 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.util;
import java.util.Properties;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -34,7 +34,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({MiscTests.class, SmallTests.class})
public class TestShowProperties {
- private static final Log LOG = LogFactory.getLog(TestShowProperties.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestShowProperties.class);
@Test
public void testShowProperty() {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java
index a628e98ee9..e6a06c1c7b 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java
@@ -22,16 +22,16 @@ import static org.junit.Assert.assertTrue;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestThreads {
- private static final Log LOG = LogFactory.getLog(TestThreads.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestThreads.class);
private static final int SLEEP_TIME_MS = 3000;
private static final int TOLERANCE_MS = (int) (0.10 * SLEEP_TIME_MS);
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index c6e76cec54..ceeb277549 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -208,8 +208,8 @@
<dependency>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
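The pom changes in this patch all follow the same shape: the direct commons-logging dependency is replaced by org.slf4j:slf4j-api. Note that slf4j-api is only the logging facade; a concrete binding (for example slf4j-log4j12) still has to be on the runtime classpath, otherwise SLF4J prints a warning and falls back to its no-op logger.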
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
index 4535cab129..7b071f412c 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
@@ -38,13 +38,13 @@ import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
@@ -83,7 +83,7 @@ import org.apache.hadoop.hbase.util.Pair;
@InterfaceAudience.Public
public class AggregationClient implements Closeable {
// TODO: This class is not used. Move to examples?
- private static final Log log = LogFactory.getLog(AggregationClient.class);
+ private static final Logger log = LoggerFactory.getLogger(AggregationClient.class);
private final Connection connection;
/**
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index 6e0e6d447a..6beb3f66fd 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -33,11 +33,11 @@ import java.util.Collections;
import java.util.List;
import java.util.NavigableSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
@@ -62,7 +62,7 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
@InterfaceAudience.Private
public class AggregateImplementation
extends AggregateService implements RegionCoprocessor {
- protected static final Log log = LogFactory.getLog(AggregateImplementation.class);
+ protected static final Logger log = LoggerFactory.getLogger(AggregateImplementation.class);
private RegionCoprocessorEnvironment env;
/**
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
index 66e9e044f6..f642d610b7 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
@@ -29,8 +29,6 @@ import java.util.Map;
import java.util.TreeMap;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -75,7 +73,8 @@ import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.RpcCallback;
@@ -93,7 +92,7 @@ import com.google.protobuf.Service;
@InterfaceStability.Evolving
public class Export extends ExportProtos.ExportService implements RegionCoprocessor {
- private static final Log LOG = LogFactory.getLog(Export.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Export.class);
private static final Class<? extends CompressionCodec> DEFAULT_CODEC = DefaultCodec.class;
private static final SequenceFile.CompressionType DEFAULT_TYPE = SequenceFile.CompressionType.RECORD;
private RegionCoprocessorEnvironment env = null;
@@ -341,7 +340,7 @@ public class Export extends ExportProtos.ExportService implements RegionCoproces
done.run(response);
} catch (IOException e) {
CoprocessorRpcUtils.setControllerException(controller, e);
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
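This hunk also shows why some call sites could not be converted one-for-one: commons-logging's error(Object) accepted the exception itself, while SLF4J's Logger takes a String message first and keeps the stack trace only when the Throwable is passed as the last argument. A minimal sketch (hypothetical Failing class):

    import java.io.IOException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Failing {
      private static final Logger LOG = LoggerFactory.getLogger(Failing.class);

      void run() {
        try {
          throw new IOException("boom");
        } catch (IOException e) {
          // commons-logging allowed: LOG.error(e);
          // SLF4J wants a String message plus the Throwable so the stack trace is preserved:
          LOG.error(e.toString(), e);
        }
      }
    }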
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
index 69d8491f9a..18c932e22a 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
@@ -23,8 +23,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
@@ -51,6 +49,8 @@ import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Coprocessor service for bulk loads in secure mode.
@@ -63,7 +63,7 @@ public class SecureBulkLoadEndpoint extends SecureBulkLoadService implements Reg
public static final long VERSION = 0L;
- private static final Log LOG = LogFactory.getLog(SecureBulkLoadEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SecureBulkLoadEndpoint.class);
private RegionCoprocessorEnvironment env;
private RegionServerServices rsServices;
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
index cfcb565cc3..fd570e7854 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationP
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -45,7 +45,7 @@ import com.google.protobuf.Service;
*/
public class ColumnAggregationEndpoint extends ColumnAggregationService
implements RegionCoprocessor {
- private static final Log LOG = LogFactory.getLog(ColumnAggregationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ColumnAggregationEndpoint.class);
private RegionCoprocessorEnvironment env = null;
@Override
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
index 80316d3984..5effbe9fa4 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationW
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -48,7 +48,9 @@ import com.google.protobuf.Service;
*/
public class ColumnAggregationEndpointNullResponse
extends ColumnAggregationServiceNullResponse implements RegionCoprocessor {
- private static final Log LOG = LogFactory.getLog(ColumnAggregationEndpointNullResponse.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ColumnAggregationEndpointNullResponse.class);
+
private RegionCoprocessorEnvironment env = null;
@Override
public Iterable<Service> getServices() {
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
index 49b79ce51f..39e3b12cd2 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -37,6 +35,8 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -50,7 +50,9 @@ import com.google.protobuf.Service;
public class ColumnAggregationEndpointWithErrors
extends ColumnAggregationWithErrorsProtos.ColumnAggregationServiceWithErrors
implements RegionCoprocessor {
- private static final Log LOG = LogFactory.getLog(ColumnAggregationEndpointWithErrors.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ColumnAggregationEndpointWithErrors.class);
+
private RegionCoprocessorEnvironment env = null;
@Override
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
index eecf7a3f0c..5433792986 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
@@ -25,8 +25,6 @@ import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -51,6 +49,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.ByteString;
import com.google.protobuf.ServiceException;
@@ -60,7 +60,7 @@ import com.google.protobuf.ServiceException;
*/
@Category({CoprocessorTests.class, MediumTests.class})
public class TestBatchCoprocessorEndpoint {
- private static final Log LOG = LogFactory.getLog(TestBatchCoprocessorEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestBatchCoprocessorEndpoint.class);
private static final TableName TEST_TABLE =
TableName.valueOf("TestTable");
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
index e52e032823..37e5a78cb1 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.coprocessor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -48,6 +46,8 @@ import java.util.*;
import org.junit.*;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@@ -59,7 +59,7 @@ import static org.junit.Assert.assertFalse;
*/
@Category({CoprocessorTests.class, MediumTests.class})
public class TestClassLoading {
- private static final Log LOG = LogFactory.getLog(TestClassLoading.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestClassLoading.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
public static class TestMasterCoprocessor implements MasterCoprocessor, MasterObserver {
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
index adfd8d53e5..376c071586 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
@@ -30,8 +30,6 @@ import java.util.List;
import java.util.Map;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -59,6 +57,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
@@ -68,7 +68,7 @@ import com.google.protobuf.ServiceException;
*/
@Category({CoprocessorTests.class, MediumTests.class})
public class TestCoprocessorEndpoint {
- private static final Log LOG = LogFactory.getLog(TestCoprocessorEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorEndpoint.class);
private static final TableName TEST_TABLE =
TableName.valueOf("TestCoprocessorEndpoint");
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
index 8c11192677..8a79400bcb 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
@@ -74,10 +74,10 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.Message;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
/**
* Verifies ProcessEndpoint works.
@@ -86,7 +86,7 @@ import org.apache.commons.logging.LogFactory;
@Category({CoprocessorTests.class, MediumTests.class})
public class TestRowProcessorEndpoint {
- private static final Log LOG = LogFactory.getLog(TestRowProcessorEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRowProcessorEndpoint.class);
private static final TableName TABLE = TableName.valueOf("testtable");
private final static byte[] ROW = Bytes.toBytes("testrow");
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
index e4cd54d581..76ef82504e 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
@@ -27,8 +27,7 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -82,10 +81,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MediumTests.class})
public class TestSecureExport {
- private static final Log LOG = LogFactory.getLog(TestSecureExport.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSecureExport.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static MiniKdc KDC;
private static final File KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath());
@@ -311,7 +312,7 @@ public class TestSecureExport {
} catch (ServiceException | IOException ex) {
throw ex;
} catch (Throwable ex) {
- LOG.error(ex);
+ LOG.error(ex.toString(), ex);
throw new Exception(ex);
} finally {
clearOutput(output);
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
index 25953bc2a9..2daacde5a0 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
@@ -49,7 +47,8 @@ import org.junit.Ignore;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -64,8 +63,8 @@ public class TestHRegionServerBulkLoadWithOldSecureEndpoint extends TestHRegionS
super(duration);
}
- private static final Log LOG =
- LogFactory.getLog(TestHRegionServerBulkLoadWithOldSecureEndpoint.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestHRegionServerBulkLoadWithOldSecureEndpoint.class);
@BeforeClass
public static void setUpBeforeClass() throws IOException {
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
index bad01f3eae..0b17abf0dc 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
@@ -26,8 +26,6 @@ import java.io.IOException;
import java.util.Collections;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionLocation;
@@ -61,6 +59,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -69,7 +69,7 @@ import com.google.protobuf.ServiceException;
@Category({RegionServerTests.class, MediumTests.class})
public class TestServerCustomProtocol {
- private static final Log LOG = LogFactory.getLog(TestServerCustomProtocol.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestServerCustomProtocol.class);
static final String WHOAREYOU = "Who are you?";
static final String NOBODY = "nobody";
static final String HELLO = "Hello, ";
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
index 0b33d20e7d..1b74b7d724 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
@@ -21,8 +21,6 @@ import java.util.List;
import java.util.Set;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
@@ -37,12 +35,14 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.HFileTestUtil;
import org.junit.BeforeClass;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ ReplicationTests.class, LargeTests.class })
public class TestReplicationSyncUpToolWithBulkLoadedData extends TestReplicationSyncUpTool {
- private static final Log LOG = LogFactory
- .getLog(TestReplicationSyncUpToolWithBulkLoadedData.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TestReplicationSyncUpToolWithBulkLoadedData.class);
@BeforeClass
public static void setUpBeforeClass() throws Exception {
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index 2b94633129..e86d4f1850 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -158,8 +158,8 @@
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java
index 67aba62fc6..63d00fb27e 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java
@@ -25,8 +25,6 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.IntStream;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
@@ -38,13 +36,15 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A simple example shows how to use asynchronous client.
*/
public class AsyncClientExample extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(AsyncClientExample.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncClientExample.class);
/**
* The size for thread pool.
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java
index 7b11684b4e..8e8a828870 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java
@@ -18,8 +18,17 @@
*/
package org.apache.hadoop.hbase.client.example;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
@@ -31,24 +40,15 @@ import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An example of using the {@link BufferedMutator} interface.
*/
public class BufferedMutatorExample extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(BufferedMutatorExample.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BufferedMutatorExample.class);
private static final int POOL_SIZE = 10;
private static final int TASK_COUNT = 100;
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
index e460316515..53472bb8d1 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
@@ -18,7 +18,6 @@
*/
package org.apache.hadoop.hbase.client.example;
-import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -31,8 +30,6 @@ import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -50,7 +47,10 @@ import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
* Example on how to use HBase's {@link Connection} and {@link Table} in a
@@ -90,7 +90,7 @@ import org.apache.hadoop.util.ToolRunner;
*
*/
public class MultiThreadedClientExample extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(MultiThreadedClientExample.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedClientExample.class);
private static final int DEFAULT_NUM_OPERATIONS = 500000;
/**
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
index 0401959b68..a829b2ab1b 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.client.example;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
@@ -40,7 +40,7 @@ import java.io.IOException;
* Region Server side via the RefreshHFilesService.
*/
public class RefreshHFilesClient implements Closeable {
- private static final Log LOG = LogFactory.getLog(RefreshHFilesClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RefreshHFilesClient.class);
private final Connection connection;
/**
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
index 233ea18ead..4735b3db72 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
@@ -25,8 +25,6 @@ import java.util.List;
import java.util.Set;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -50,6 +48,8 @@ import org.apache.hadoop.hbase.regionserver.OperationStatus;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -97,7 +97,7 @@ import com.google.protobuf.Service;
*/
public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCoprocessor {
private static final String NO_OF_VERSIONS_TO_DELETE = "noOfVersionsToDelete";
- private static final Log LOG = LogFactory.getLog(BulkDeleteEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BulkDeleteEndpoint.class);
private RegionCoprocessorEnvironment env;
@@ -167,7 +167,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCopro
}
}
} catch (IOException ioe) {
- LOG.error(ioe);
+ LOG.error(ioe.toString(), ioe);
// Call ServerRpcController#getFailedOn() to retrieve this IOException at client side.
CoprocessorRpcUtils.setControllerException(controller, ioe);
} finally {
@@ -175,7 +175,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCopro
try {
scanner.close();
} catch (IOException ioe) {
- LOG.error(ioe);
+ LOG.error(ioe.toString(), ioe);
}
}
}
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
index c27672cf9d..e916cb38f6 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.coprocessor.example;
import java.io.IOException;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.metrics.Counter;
import org.apache.hadoop.hbase.metrics.Gauge;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.metrics.Timer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An example coprocessor that collects some metrics to demonstrate the usage of exporting custom
@@ -53,7 +53,7 @@ public class ExampleMasterObserverWithMetrics implements MasterCoprocessor, Mast
return Optional.of(this);
}
- private static final Log LOG = LogFactory.getLog(ExampleMasterObserverWithMetrics.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExampleMasterObserverWithMetrics.class);
/** This is the Timer metric object to keep track of the current count across invocations */
private Timer createTableTimer;
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
index 71d40d43ec..60cb1542fd 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
@@ -21,8 +21,7 @@ package org.apache.hadoop.hbase.coprocessor.example;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
@@ -30,6 +29,8 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos;
import org.apache.hadoop.hbase.regionserver.Store;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Collections;
@@ -44,7 +45,7 @@ import java.util.Collections;
*/
public class RefreshHFilesEndpoint extends RefreshHFilesProtos.RefreshHFilesService
implements RegionCoprocessor {
- protected static final Log LOG = LogFactory.getLog(RefreshHFilesEndpoint.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(RefreshHFilesEndpoint.class);
private RegionCoprocessorEnvironment env;
public RefreshHFilesEndpoint() {
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
index 46336d53b7..77f98999ea 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
@@ -25,8 +25,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -55,10 +53,12 @@ import org.apache.hadoop.hbase.wal.WAL;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(MediumTests.class)
public class TestRefreshHFilesEndpoint {
- private static final Log LOG = LogFactory.getLog(TestRefreshHFilesEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRefreshHFilesEndpoint.class);
private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
private static final int NUM_MASTER = 1;
private static final int NUM_RS = 2;
@@ -128,7 +128,7 @@ public class TestRefreshHFilesEndpoint {
if (rex.getCause() instanceof IOException)
throw new IOException();
} catch (Throwable ex) {
- LOG.error(ex);
+ LOG.error(ex.toString(), ex);
fail("Couldn't call the RefreshRegionHFilesEndpoint");
}
}
diff --git a/hbase-external-blockcache/pom.xml b/hbase-external-blockcache/pom.xml
index 26e4aea188..46e58c2ee5 100644
--- a/hbase-external-blockcache/pom.xml
+++ b/hbase-external-blockcache/pom.xml
@@ -155,8 +155,8 @@
<optional>true</optional>
</dependency>
<dependency>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.htrace</groupId>
diff --git a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
index b12ac1dd63..a523663381 100644
--- a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
+++ b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
@@ -29,8 +29,6 @@ import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.ExecutionException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.hadoop.hbase.util.Addressing;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import net.spy.memcached.CachedData;
import net.spy.memcached.ConnectionFactoryBuilder;
@@ -55,7 +55,7 @@ import net.spy.memcached.transcoders.Transcoder;
*/
@InterfaceAudience.Private
public class MemcachedBlockCache implements BlockCache {
- private static final Log LOG = LogFactory.getLog(MemcachedBlockCache.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(MemcachedBlockCache.class.getName());
// Some memcache versions won't take more than 1024 * 1024. So set the limit below
// that just in case this client is used with those versions.
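A small nuance kept from the original here: the logger is looked up by name (MemcachedBlockCache.class.getName()) rather than by Class. SLF4J's LoggerFactory offers both getLogger(Class) and getLogger(String), and both resolve to the same logger, so behavior is unchanged. A tiny sketch with a hypothetical NameLookup class:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class NameLookup {
      // Both lookups resolve to the same underlying logger name:
      static final Logger BY_CLASS = LoggerFactory.getLogger(NameLookup.class);
      static final Logger BY_NAME  = LoggerFactory.getLogger(NameLookup.class.getName());
    }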
diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml
index 47ff69bc83..08ac22dd87 100644
--- a/hbase-hadoop-compat/pom.xml
+++ b/hbase-hadoop-compat/pom.xml
@@ -88,8 +88,8 @@
<dependency>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java
index 83053164e5..5c1f1035f5 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java
@@ -18,18 +18,18 @@
package org.apache.hadoop.hbase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import java.util.Iterator;
import java.util.ServiceLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
/**
* Class that will create many instances of classes provided by the hbase-hadoop{1|2}-compat jars.
*/
public class CompatibilityFactory {
- private static final Log LOG = LogFactory.getLog(CompatibilitySingletonFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompatibilitySingletonFactory.class);
public static final String EXCEPTION_START = "Could not create ";
public static final String EXCEPTION_END = " Is the hadoop compatibility jar on the classpath?";
@@ -54,7 +54,7 @@ public class CompatibilityFactory {
msg.append(it.next()).append(" ");
}
msg.append("}");
- LOG.warn(msg);
+ LOG.warn(msg.toString());
}
} catch (Exception e) {
throw new RuntimeException(createExceptionString(klass), e);
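The warn() change above is the other recurring API difference: commons-logging exposed warn(Object), so a StringBuilder could be logged directly, while SLF4J only exposes warn(String, ...), hence the explicit toString(). A minimal sketch (hypothetical WarnSketch class):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class WarnSketch {
      private static final Logger LOG = LoggerFactory.getLogger(WarnSketch.class);

      void warnAbout(Class<?> klass) {
        StringBuilder msg = new StringBuilder("Could not create ").append(klass.getName());
        // commons-logging accepted the StringBuilder itself: LOG.warn(msg);
        // SLF4J requires a String message, so the builder is converted explicitly:
        LOG.warn(msg.toString());
      }
    }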
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
index be6d6d1809..3dc3f49655 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
@@ -18,14 +18,14 @@
package org.apache.hadoop.hbase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.ServiceLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
/**
* Factory for classes supplied by hadoop compatibility modules. Only one of each class will be
* created.
@@ -36,7 +36,7 @@ public class CompatibilitySingletonFactory extends CompatibilityFactory {
private final Object lock = new Object();
private final Map<Class, Object> instances = new HashMap<>();
}
- private static final Log LOG = LogFactory.getLog(CompatibilitySingletonFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompatibilitySingletonFactory.class);
/**
* This is a static only class don't let anyone create an instance.
@@ -67,7 +67,7 @@ public class CompatibilitySingletonFactory extends CompatibilityFactory {
msg.append(it.next()).append(" ");
}
msg.append("}");
- LOG.warn(msg);
+ LOG.warn(msg.toString());
}
} catch (Exception e) {
throw new RuntimeException(createExceptionString(klass), e);
diff --git a/hbase-hadoop2-compat/pom.xml b/hbase-hadoop2-compat/pom.xml
index b2614357b0..0a99090b3c 100644
--- a/hbase-hadoop2-compat/pom.xml
+++ b/hbase-hadoop2-compat/pom.xml
@@ -169,8 +169,8 @@ limitations under the License.
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java
index 0400f7f2c9..69892851bb 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
@@ -35,7 +35,7 @@ import org.apache.hadoop.mapreduce.JobSubmissionFiles;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public abstract class JobUtil {
- private static final Log LOG = LogFactory.getLog(JobUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JobUtil.class);
protected JobUtil() {
super();
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java
index 7db26a512b..0ad5d14bca 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java
@@ -26,8 +26,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.metrics.MetricRegistries;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.metrics.MetricRegistryInfo;
@@ -38,7 +36,8 @@ import org.apache.hadoop.metrics2.impl.JmxCacheBuster;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystemHelper;
import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
public class GlobalMetricRegistriesAdapter {
- private static final Log LOG = LogFactory.getLog(GlobalMetricRegistriesAdapter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(GlobalMetricRegistriesAdapter.class);
private class MetricsSourceAdapter implements MetricsSource {
private final MetricRegistry registry;
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java
index 3a24b94de5..b6a17cf177 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java
@@ -35,8 +35,6 @@ package org.apache.hadoop.hbase.metrics.impl;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.metrics.Counter;
import org.apache.hadoop.hbase.metrics.Gauge;
import org.apache.hadoop.hbase.metrics.Histogram;
@@ -50,6 +48,8 @@ import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.Interns;
import org.apache.hadoop.metrics2.lib.MutableHistogram;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This is the adapter from "HBase Metrics Framework", implemented in hbase-metrics-api and
@@ -60,8 +60,8 @@ import org.apache.hadoop.metrics2.lib.MutableHistogram;
* Some of the code is forked from https://github.com/joshelser/dropwizard-hadoop-metrics2.
*/
public class HBaseMetrics2HadoopMetricsAdapter {
- private static final Log LOG
- = LogFactory.getLog(HBaseMetrics2HadoopMetricsAdapter.class);
+ private static final Logger LOG
+ = LoggerFactory.getLogger(HBaseMetrics2HadoopMetricsAdapter.class);
private static final String EMPTY_STRING = "";
public HBaseMetrics2HadoopMetricsAdapter() {
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java
index b03549dfa0..cb78ccf183 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java
@@ -23,9 +23,9 @@ import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsCollector;
@@ -37,7 +37,7 @@ import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
public class MetricsRegionAggregateSourceImpl extends BaseSourceImpl
implements MetricsRegionAggregateSource {
- private static final Log LOG = LogFactory.getLog(MetricsRegionAggregateSourceImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionAggregateSourceImpl.class);
private final MetricsExecutorImpl executor = new MetricsExecutorImpl();
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
index 09175d5dbe..8f11811a94 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
@@ -31,7 +31,7 @@ import org.apache.hadoop.metrics2.lib.MutableFastCounter;
@InterfaceAudience.Private
public class MetricsRegionSourceImpl implements MetricsRegionSource {
- private static final Log LOG = LogFactory.getLog(MetricsRegionSourceImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionSourceImpl.class);
private AtomicBoolean closed = new AtomicBoolean(false);
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java
index 5ef8d81246..588986e7c4 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsCollector;
@@ -32,7 +32,7 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
public class MetricsTableAggregateSourceImpl extends BaseSourceImpl
implements MetricsTableAggregateSource {
- private static final Log LOG = LogFactory.getLog(MetricsTableAggregateSourceImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsTableAggregateSourceImpl.class);
private ConcurrentHashMap<String, MetricsTableSource> tableSources = new ConcurrentHashMap<>();
public MetricsTableAggregateSourceImpl() {
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java
index f95eb4c597..2269d9ab93 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
@@ -31,7 +31,7 @@ import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
@InterfaceAudience.Private
public class MetricsTableSourceImpl implements MetricsTableSource {
- private static final Log LOG = LogFactory.getLog(MetricsTableSourceImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsTableSourceImpl.class);
private AtomicBoolean closed = new AtomicBoolean(false);
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
index 00763c67d0..dc5608014e 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
@@ -22,9 +22,9 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.metrics2.MetricsExecutor;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class JmxCacheBuster {
- private static final Log LOG = LogFactory.getLog(JmxCacheBuster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JmxCacheBuster.class);
private static AtomicReference<ScheduledFuture> fut = new AtomicReference<>(null);
private static MetricsExecutor executor = new MetricsExecutorImpl();
private static AtomicBoolean stopped = new AtomicBoolean(false);
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java
index eb465c38c8..3ecd8887ef 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java
@@ -20,13 +20,12 @@ package org.apache.hadoop.metrics2.lib;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class DefaultMetricsSystemHelper {
- private static final Log LOG = LogFactory.getLog(DefaultMetricsSystemHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultMetricsSystemHelper.class);
private final Method removeObjectMethod;
private final Field sourceNamesField;
private final Field mapField;
@@ -49,7 +48,7 @@ public class DefaultMetricsSystemHelper {
f2 = UniqueNames.class.getDeclaredField("map");
f2.setAccessible(true);
} catch (NoSuchFieldException e) {
- LOG.trace(e);
+ LOG.trace(e.toString(), e);
f1 = null;
f2 = null;
}
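
Note (not part of the patch): the LOG.trace(e) -> LOG.trace(e.toString(), e) change above follows the same reasoning as the warn() change earlier — commons-logging offered trace(Object), SLF4J does not, and passing the exception as the second argument keeps the stack trace in the output. A small sketch of the two-argument form, with a hypothetical class name:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical class, for illustration only.
public class ReflectionLookupSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ReflectionLookupSketch.class);

  void lookupField(Class<?> clazz, String name) {
    try {
      clazz.getDeclaredField(name);
    } catch (NoSuchFieldException e) {
      // Message plus Throwable: the stack trace is still emitted at TRACE level.
      LOG.trace(e.toString(), e);
    }
  }
}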
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java
index a7d221b3b5..0b8111bb65 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.metrics2.lib;
import java.util.Collection;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsInfo;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
*/
@InterfaceAudience.Private
public class DynamicMetricsRegistry {
- private static final Log LOG = LogFactory.getLog(DynamicMetricsRegistry.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DynamicMetricsRegistry.class);
private final ConcurrentMap<String, MutableMetric> metricsMap =
Maps.newConcurrentMap();
diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml
index 048ebff16b..a152a660db 100644
--- a/hbase-http/pom.xml
+++ b/hbase-http/pom.xml
@@ -208,7 +208,6 @@
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-resource-bundle</artifactId>
-      <version>${project.version}</version>
       <optional>true</optional>
     </dependency>
@@ -249,8 +248,8 @@
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
       <groupId>log4j</groupId>
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
index cfc0640dad..243da1b14f 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java
@@ -19,22 +19,22 @@ package org.apache.hadoop.hbase.http;
import java.util.HashMap;
-import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogConfigurationException;
-import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.log4j.Appender;
-import org.apache.log4j.Logger;
-
-import org.eclipse.jetty.server.RequestLog;
+import org.apache.log4j.LogManager;
import org.eclipse.jetty.server.NCSARequestLog;
+import org.eclipse.jetty.server.RequestLog;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.impl.Log4jLoggerAdapter;
/**
* RequestLog object for use with Http
*/
public class HttpRequestLog {
- private static final Log LOG = LogFactory.getLog(HttpRequestLog.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HttpRequestLog.class);
private static final HashMap<String, String> serverToComponent;
static {
@@ -43,6 +43,19 @@ public class HttpRequestLog {
serverToComponent.put("region", "regionserver");
}
+ private static org.apache.log4j.Logger getLog4jLogger(String loggerName) {
+ Logger logger = LoggerFactory.getLogger(loggerName);
+
+ if (logger instanceof Log4JLogger) {
+ Log4JLogger httpLog4JLog = (Log4JLogger)logger;
+ return httpLog4JLog.getLogger();
+ } else if (logger instanceof Log4jLoggerAdapter) {
+ return LogManager.getLogger(loggerName);
+ } else {
+ return null;
+ }
+ }
+
public static RequestLog getRequestLog(String name) {
String lookup = serverToComponent.get(name);
@@ -51,42 +64,40 @@ public class HttpRequestLog {
}
String loggerName = "http.requests." + name;
String appenderName = name + "requestlog";
- Log logger = LogFactory.getLog(loggerName);
- if (logger instanceof Log4JLogger) {
- Log4JLogger httpLog4JLog = (Log4JLogger)logger;
- Logger httpLogger = httpLog4JLog.getLogger();
- Appender appender = null;
+ org.apache.log4j.Logger httpLogger = getLog4jLogger(loggerName);
- try {
- appender = httpLogger.getAppender(appenderName);
- } catch (LogConfigurationException e) {
- LOG.warn("Http request log for " + loggerName
- + " could not be created");
- throw e;
- }
+ if (httpLogger == null) {
+ LOG.warn("Jetty request log can only be enabled using Log4j");
+ return null;
+ }
- if (appender == null) {
- LOG.info("Http request log for " + loggerName
- + " is not defined");
- return null;
- }
+ Appender appender = null;
- if (appender instanceof HttpRequestLogAppender) {
- HttpRequestLogAppender requestLogAppender
- = (HttpRequestLogAppender)appender;
- NCSARequestLog requestLog = new NCSARequestLog();
- requestLog.setFilename(requestLogAppender.getFilename());
- requestLog.setRetainDays(requestLogAppender.getRetainDays());
- return requestLog;
- } else {
- LOG.warn("Jetty request log for " + loggerName
- + " was of the wrong class");
- return null;
- }
+ try {
+ appender = httpLogger.getAppender(appenderName);
+ } catch (LogConfigurationException e) {
+ LOG.warn("Http request log for " + loggerName
+ + " could not be created");
+ throw e;
}
- else {
- LOG.warn("Jetty request log can only be enabled using Log4j");
+
+ if (appender == null) {
+ LOG.info("Http request log for " + loggerName
+ + " is not defined");
+ return null;
+ }
+
+ if (appender instanceof HttpRequestLogAppender) {
+ HttpRequestLogAppender requestLogAppender
+ = (HttpRequestLogAppender)appender;
+ NCSARequestLog requestLog = new NCSARequestLog();
+ requestLog.setFilename(requestLogAppender.getFilename());
+ requestLog.setRetainDays(requestLogAppender.getRetainDays());
+ return requestLog;
+ } else {
+ LOG.warn("Jetty request log for " + loggerName
+ + " was of the wrong class");
return null;
}
}
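
Note (not part of the patch): the getLog4jLogger helper introduced above still needs the concrete log4j Logger even though the class now asks SLF4J for loggers. When the slf4j-log4j12 binding is on the classpath, LoggerFactory hands back a Log4jLoggerAdapter, and the underlying logger can be re-fetched by name from log4j's LogManager. A standalone sketch of that lookup (class name is hypothetical):

import org.apache.log4j.LogManager;
import org.slf4j.LoggerFactory;
import org.slf4j.impl.Log4jLoggerAdapter;

// Hypothetical helper, for illustration only.
public final class Log4jUnwrapSketch {
  static org.apache.log4j.Logger toLog4j(String loggerName) {
    org.slf4j.Logger logger = LoggerFactory.getLogger(loggerName);
    if (logger instanceof Log4jLoggerAdapter) {
      // The same name, resolved through log4j directly, yields the adapted logger.
      return LogManager.getLogger(loggerName);
    }
    return null; // a different SLF4J binding is active; callers must handle this
  }
}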
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
index c2b5944ba2..087a33f82e 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
@@ -47,8 +47,6 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -93,6 +91,8 @@ import org.eclipse.jetty.webapp.WebAppContext;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Create a Jetty embedded server to answer http requests. The primary goal
@@ -105,7 +105,7 @@ import org.glassfish.jersey.servlet.ServletContainer;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class HttpServer implements FilterContainer {
- private static final Log LOG = LogFactory.getLog(HttpServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HttpServer.class);
private static final String EMPTY_STRING = "";
private static final int DEFAULT_MAX_HEADER_SIZE = 64 * 1024; // 64K
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java
index 2e43be2677..13d6c400ac 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java
@@ -31,10 +31,10 @@ import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.http.HttpServer;
import org.apache.hadoop.hbase.util.JSONBean;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/*
* This servlet is based off of the JMXProxyServlet from Tomcat 7.0.14. It has
@@ -111,7 +111,7 @@ import org.apache.hadoop.hbase.util.JSONBean;
*
*/
public class JMXJsonServlet extends HttpServlet {
- private static final Log LOG = LogFactory.getLog(JMXJsonServlet.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JMXJsonServlet.class);
private static final long serialVersionUID = 1L;
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java
index a1fa9f00a0..72cedddd68 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java
@@ -17,10 +17,14 @@
*/
package org.apache.hadoop.hbase.http.lib;
+import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.DEFAULT_HBASE_HTTP_STATIC_USER;
+import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.HBASE_HTTP_STATIC_USER;
+
import java.io.IOException;
import java.security.Principal;
import java.util.HashMap;
+import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
@@ -29,18 +33,13 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.http.FilterContainer;
import org.apache.hadoop.hbase.http.FilterInitializer;
-
-import javax.servlet.Filter;
-
-import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.HBASE_HTTP_STATIC_USER;
-import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.DEFAULT_HBASE_HTTP_STATIC_USER;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides a servlet filter that pretends to authenticate a fake user (Dr.Who)
@@ -50,7 +49,7 @@ import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.DEFAULT_HBASE
public class StaticUserWebFilter extends FilterInitializer {
static final String DEPRECATED_UGI_KEY = "dfs.web.ugi";
- private static final Log LOG = LogFactory.getLog(StaticUserWebFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StaticUserWebFilter.class);
static class User implements Principal {
private final String name;
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
index e23eecdbad..0d409726e9 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
@@ -30,14 +30,16 @@ import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Jdk14Logger;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.impl.Log4jLoggerAdapter;
import org.apache.hadoop.hbase.http.HttpServer;
import org.apache.hadoop.util.ServletUtil;
+import org.apache.log4j.LogManager;
/**
* Change log level in runtime.
@@ -115,7 +117,7 @@ public class LogLevel {
out.println(MARKER
+ "Submitted Log Name: <b>" + logName + "</b><br />");
- Log log = LogFactory.getLog(logName);
+ Logger log = LoggerFactory.getLogger(logName);
out.println(MARKER
+ "Log Class: <b>" + log.getClass().getName() +"</b><br />");
if (level != null) {
@@ -124,11 +126,11 @@ public class LogLevel {
if (log instanceof Log4JLogger) {
process(((Log4JLogger)log).getLogger(), level, out);
- }
- else if (log instanceof Jdk14Logger) {
+ } else if (log instanceof Jdk14Logger) {
process(((Jdk14Logger)log).getLogger(), level, out);
- }
- else {
+ } else if (log instanceof Log4jLoggerAdapter) {
+ process(LogManager.getLogger(logName), level, out);
+ } else {
out.println("Sorry, " + log.getClass() + " not supported. ");
}
}
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
index 729dd06b4c..f2461a16aa 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
@@ -33,18 +33,18 @@ import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.net.NetUtils;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestGlobalFilter extends HttpServerFunctionalTest {
- private static final Log LOG = LogFactory.getLog(HttpServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HttpServer.class);
static final Set<String> RECORDS = new TreeSet<>();
/** A very simple filter that records accessed uri's */
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index fddb2a4842..68c075229b 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -45,8 +45,6 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.http.HttpServer.QuotingInputFilter.RequestQuoter;
@@ -67,10 +65,12 @@ import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestHttpServer extends HttpServerFunctionalTest {
- private static final Log LOG = LogFactory.getLog(TestHttpServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHttpServer.class);
private static HttpServer server;
private static URL baseUrl;
// jetty 9.4.x needs this many threads to start, even in the small.
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
index db394a8d7e..3edb12c4f7 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
@@ -21,9 +21,8 @@ import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
/**
@@ -31,7 +30,7 @@ import java.io.FileNotFoundException;
*/
@Category({MiscTests.class, SmallTests.class})
public class TestHttpServerWebapps extends HttpServerFunctionalTest {
- private static final Log log = LogFactory.getLog(TestHttpServerWebapps.class);
+ private static final Logger log = LoggerFactory.getLogger(TestHttpServerWebapps.class);
/**
* Test that the test server is loadable on the classpath
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
index 5eff2b4fb3..cffb4c3c09 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
@@ -33,18 +33,18 @@ import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.net.NetUtils;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestPathFilter extends HttpServerFunctionalTest {
- private static final Log LOG = LogFactory.getLog(HttpServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HttpServer.class);
static final Set<String> RECORDS = new TreeSet<>();
/** A very simple filter that records accessed uri's */
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
index b599350cc6..282530f0ec 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
@@ -25,8 +25,6 @@ import java.net.URL;
import javax.net.ssl.HttpsURLConnection;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -40,6 +38,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This testcase issues SSL certificates configures the HttpServer to serve
@@ -51,7 +51,7 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest {
private static final String BASEDIR = System.getProperty("test.build.dir",
"target/test-dir") + "/" + TestSSLHttpServer.class.getSimpleName();
- private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSSLHttpServer.class);
private static Configuration conf;
private static HttpServer server;
private static URL baseUrl;
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
index 32bc03ed2b..756487199c 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
@@ -32,8 +32,6 @@ import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -43,10 +41,12 @@ import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestServletFilter extends HttpServerFunctionalTest {
- private static final Log LOG = LogFactory.getLog(HttpServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HttpServer.class);
static volatile String uri = null;
/** A very simple filter which record the uri filtered. */
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
index 4fad03139a..13e2519da5 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
@@ -16,10 +16,8 @@
*/
package org.apache.hadoop.hbase.http;
-import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
-import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.Principal;
@@ -29,8 +27,6 @@ import java.util.Set;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosTicket;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.TestHttpServer.EchoServlet;
import org.apache.hadoop.hbase.http.resource.JerseyResource;
@@ -48,8 +44,6 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.config.Lookup;
import org.apache.http.config.RegistryBuilder;
-import org.apache.http.entity.ByteArrayEntity;
-import org.apache.http.entity.ContentType;
import org.apache.http.impl.auth.SPNegoSchemeFactory;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.HttpClients;
@@ -65,6 +59,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test class for SPNEGO authentication on the HttpServer. Uses Kerby's MiniKDC and Apache
@@ -72,7 +68,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({MiscTests.class, SmallTests.class})
public class TestSpnegoHttpServer extends HttpServerFunctionalTest {
- private static final Log LOG = LogFactory.getLog(TestSpnegoHttpServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSpnegoHttpServer.class);
private static final String KDC_SERVER_HOST = "localhost";
private static final String CLIENT_PRINCIPAL = "client";
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
index 484162af55..9a6399d593 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
@@ -25,8 +25,6 @@ import java.util.regex.Pattern;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.http.HttpServer;
@@ -35,10 +33,12 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestJMXJsonServlet extends HttpServerFunctionalTest {
- private static final Log LOG = LogFactory.getLog(TestJMXJsonServlet.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestJMXJsonServlet.class);
private static HttpServer server;
private static URL baseUrl;
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
index e14e3b4846..84d2493ba3 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
@@ -19,18 +19,23 @@ package org.apache.hadoop.hbase.http.log;
import static org.junit.Assert.assertTrue;
-import java.io.*;
-import java.net.*;
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.net.URI;
+import java.net.URL;
+import org.apache.hadoop.hbase.http.HttpServer;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.HttpServer;
import org.apache.hadoop.net.NetUtils;
-import org.apache.commons.logging.*;
-import org.apache.commons.logging.impl.*;
-import org.apache.log4j.*;
+import org.apache.log4j.Level;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.LoggerFactory;
+import org.slf4j.impl.Log4jLoggerAdapter;
@Category({MiscTests.class, SmallTests.class})
public class TestLogLevel {
@@ -40,11 +45,11 @@ public class TestLogLevel {
@SuppressWarnings("deprecation")
public void testDynamicLogLevel() throws Exception {
String logName = TestLogLevel.class.getName();
- Log testlog = LogFactory.getLog(logName);
+ org.slf4j.Logger testlog = LoggerFactory.getLogger(logName);
//only test Log4JLogger
- if (testlog instanceof Log4JLogger) {
- Logger log = ((Log4JLogger)testlog).getLogger();
+ if (testlog instanceof Log4jLoggerAdapter) {
+ Logger log = LogManager.getLogger(logName);
log.debug("log.debug1");
log.info("log.info1");
log.error("log.error1");
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
index bf0e609192..31ff0ed4e0 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
@@ -30,9 +30,9 @@ import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.eclipse.jetty.util.ajax.JSON;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A simple Jersey resource class TestHttpServer.
@@ -41,7 +41,7 @@ import org.eclipse.jetty.util.ajax.JSON;
*/
@Path("")
public class JerseyResource {
- private static final Log LOG = LogFactory.getLog(JerseyResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JerseyResource.class);
public static final String PATH = "path";
public static final String OP = "op";
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index 3cd3e6c28d..f75024de6d 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -236,8 +236,8 @@
       <artifactId>metrics-core</artifactId>
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
       <groupId>commons-cli</groupId>
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
index e8a00416e5..6546ac9288 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
+import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
@@ -422,7 +423,7 @@ public class DistributedHBaseCluster extends HBaseCluster {
LOG.warn("Restoring cluster - restoring region servers reported "
+ deferred.size() + " errors:");
for (int i=0; i columnFamilies;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
index 70452bb556..a49f54117e 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
@@ -23,18 +23,18 @@ import java.util.Collection;
import java.util.List;
import org.apache.commons.lang3.RandomUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.chaos.policies.Policy;
import org.apache.hadoop.hbase.util.Pair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Chaos monkey that given multiple policies will run actions against the cluster.
*/
public class PolicyBasedChaosMonkey extends ChaosMonkey {
- private static final Log LOG = LogFactory.getLog(PolicyBasedChaosMonkey.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PolicyBasedChaosMonkey.class);
private static final long ONE_SEC = 1000;
private static final long FIVE_SEC = 5 * ONE_SEC;
private static final long ONE_MIN = 60 * ONE_SEC;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java
index 6b365f81a5..81267a6568 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java
@@ -18,18 +18,18 @@
package org.apache.hadoop.hbase.chaos.policies;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.chaos.actions.Action;
import org.apache.hadoop.hbase.util.StoppableImplementation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A policy to introduce chaos to the cluster
*/
public abstract class Policy extends StoppableImplementation implements Runnable {
- protected static final Log LOG = LogFactory.getLog(Policy.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(Policy.class);
protected PolicyContext context;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java
index d72111f0ef..8385c15628 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java
@@ -23,8 +23,6 @@ import java.util.Set;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@@ -34,11 +32,12 @@ import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.util.ToolRunner;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
public class ChaosMonkeyRunner extends AbstractHBaseTool {
- private static final Log LOG = LogFactory.getLog(ChaosMonkeyRunner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ChaosMonkeyRunner.class);
public static final String MONKEY_LONG_OPT = "monkey";
public static final String CHAOS_MONKEY_PROPS = "monkeyProps";
@@ -75,7 +74,7 @@ public class ChaosMonkeyRunner extends AbstractHBaseTool {
monkeyProps.load(this.getClass().getClassLoader()
.getResourceAsStream(chaosMonkeyPropsFile));
} catch (IOException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
System.exit(EXIT_FAILURE);
}
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java
index 1ce4356569..cb29427e9d 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java
@@ -24,11 +24,12 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -36,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
* This class can be used to control chaos monkeys life cycle.
*/
public class Monkeys implements Closeable {
- private static final Log LOG = LogFactory.getLog(Monkeys.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Monkeys.class);
private final Configuration conf;
private final ChaosMonkeyRunner monkeyRunner;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
index 27a2d8506f..9754d4eac6 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
@@ -37,8 +37,6 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.codec.Codec;
@@ -51,13 +49,14 @@ import org.apache.hadoop.hbase.util.Threads;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category(IntegrationTests.class)
public class IntegrationTestRpcClient {
- private static final Log LOG = LogFactory.getLog(IntegrationTestRpcClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestRpcClient.class);
private final Configuration conf;
@@ -203,7 +202,7 @@ public class IntegrationTestRpcClient {
try {
cluster.startServer();
} catch (Exception e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
exception.compareAndSet(null, e);
}
} else {
@@ -211,7 +210,7 @@ public class IntegrationTestRpcClient {
try {
cluster.stopRandomServer();
} catch (Exception e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
exception.compareAndSet(null, e);
}
}
@@ -261,7 +260,7 @@ public class IntegrationTestRpcClient {
BlockingInterface stub = newBlockingStub(rpcClient, server.getListenerAddress());
ret = stub.echo(null, param);
} catch (Exception e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
continue; // expected in case connection is closing or closed
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
index 3fa1054d95..2588e635a1 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
@@ -34,8 +34,6 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -88,7 +86,8 @@ import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@@ -128,7 +127,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@Category(IntegrationTests.class)
public class IntegrationTestBulkLoad extends IntegrationTestBase {
- private static final Log LOG = LogFactory.getLog(IntegrationTestBulkLoad.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestBulkLoad.class);
private static final byte[] CHAIN_FAM = Bytes.toBytes("L");
private static final byte[] SORT_FAM = Bytes.toBytes("S");
@@ -197,7 +196,7 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase {
Thread.sleep(sleepTime.get());
}
} catch (InterruptedException e1) {
- LOG.error(e1);
+ LOG.error(e1.toString(), e1);
}
}
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
index dfc54e0b33..ab5f2bb827 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
@@ -30,8 +30,6 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -58,6 +56,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Validate ImportTsv + LoadIncrementalHFiles on a distributed cluster.
@@ -66,7 +66,7 @@ import org.junit.rules.TestName;
public class IntegrationTestImportTsv extends Configured implements Tool {
private static final String NAME = IntegrationTestImportTsv.class.getSimpleName();
- private static final Log LOG = LogFactory.getLog(IntegrationTestImportTsv.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestImportTsv.class);
protected static final String simple_tsv =
"row1\t1\tc1\tc2\n" +
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
index 2df1c4bff1..065cec916f 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mapreduce;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -34,6 +32,8 @@ import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An integration test to test {@link TableSnapshotInputFormat} which enables
@@ -68,8 +68,8 @@ import org.junit.experimental.categories.Category;
@Category(IntegrationTests.class)
// Not runnable as a unit test. See TestTableSnapshotInputFormat
public class IntegrationTestTableSnapshotInputFormat extends IntegrationTestBase {
-
- private static final Log LOG = LogFactory.getLog(IntegrationTestTableSnapshotInputFormat.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(IntegrationTestTableSnapshotInputFormat.class);
private static final String TABLE_NAME_KEY = "IntegrationTestTableSnapshotInputFormat.table";
private static final String DEFAULT_TABLE_NAME = "IntegrationTestTableSnapshotInputFormat";
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
index e45baf151c..bb31ece973 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
@@ -30,8 +30,6 @@ import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
@@ -74,6 +72,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Integration test that should benchmark how fast HBase can recover from failures. This test starts
@@ -121,7 +121,7 @@ public class IntegrationTestMTTR {
* Constants.
*/
private static final byte[] FAMILY = Bytes.toBytes("d");
- private static final Log LOG = LogFactory.getLog(IntegrationTestMTTR.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestMTTR.class);
private static long sleepTime;
private static final String SLEEP_TIME_KEY = "hbase.IntegrationTestMTTR.sleeptime";
private static final long SLEEP_TIME_DEFAULT = 60 * 1000l;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
index b10e54a74f..f5f2ff99f2 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
@@ -19,8 +19,6 @@
*/
package org.apache.hadoop.hbase.rsgroup;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.Waiter;
@@ -29,6 +27,8 @@ import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.junit.After;
import org.junit.Before;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Runs all of the units tests defined in TestGroupBase as an integration test.
@@ -36,7 +36,7 @@ import org.junit.experimental.categories.Category;
*/
@Category(IntegrationTests.class)
public class IntegrationTestRSGroup extends TestRSGroupsBase {
- private final static Log LOG = LogFactory.getLog(IntegrationTestRSGroup.class);
+ private final static Logger LOG = LoggerFactory.getLogger(IntegrationTestRSGroup.class);
private static boolean initialized = false;
@Before
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index b9cc69d9bf..826db07552 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -42,8 +42,6 @@ import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -119,7 +117,8 @@ import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
/**
@@ -253,7 +252,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
*/
static class Generator extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(Generator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Generator.class);
/**
* Set this configuration if you want to test single-column family flush works. If set, we will
@@ -854,7 +853,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
* WALs and oldWALs dirs (Some of this is TODO).
*/
static class Search extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(Search.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Search.class);
protected Job job;
private static void printUsage(final String error) {
@@ -914,7 +913,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
try {
LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
} catch (IOException|InterruptedException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
}
if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);
@@ -1016,7 +1015,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
*/
static class Verify extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(Verify.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Verify.class);
protected static final BytesWritable DEF = new BytesWritable(new byte[] { 0 });
protected static final BytesWritable DEF_LOST_FAMILIES = new BytesWritable(new byte[] { 1 });
@@ -1455,7 +1454,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
*/
static class Loop extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(Loop.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Loop.class);
private static final String USAGE = "Usage: Loop " +
"
diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ ... @@
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
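The pom.xml hunk above (reconstructed from a mangled fragment) swaps the commons-logging dependency for slf4j-api. slf4j-api is only the facade: if no binding (for example slf4j-log4j12) is on the runtime classpath, SLF4J falls back to a no-op logger and the statements migrated in this patch produce no output. A small, purely illustrative sketch of that behaviour:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical smoke test, not part of the patch.
    public class LoggingSmokeTest {
      private static final Logger LOG = LoggerFactory.getLogger(LoggingSmokeTest.class);

      public static void main(String[] args) {
        // With only slf4j-api on the classpath this call is a no-op;
        // a binding such as slf4j-log4j12 must be present for output to appear.
        LOG.info("slf4j wiring check");
      }
    }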
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
index 91ef71404d..b0674bf401 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
@@ -19,12 +19,11 @@
package org.apache.hadoop.hbase.mapred;
import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
@@ -44,28 +43,30 @@ import org.apache.hadoop.mapred.Partitioner;
@InterfaceAudience.Public
public class HRegionPartitioner
implements Partitioner {
- private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegionPartitioner.class);
// Connection and locator are not cleaned up; they just die when partitioner is done.
private Connection connection;
private RegionLocator locator;
private byte[][] startKeys;
+ @Override
public void configure(JobConf job) {
try {
this.connection = ConnectionFactory.createConnection(HBaseConfiguration.create(job));
TableName tableName = TableName.valueOf(job.get(TableOutputFormat.OUTPUT_TABLE));
this.locator = this.connection.getRegionLocator(tableName);
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
try {
this.startKeys = this.locator.getStartKeys();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
+ @Override
public int getPartition(ImmutableBytesWritable key, V2 value, int numPartitions) {
byte[] region = null;
// Only one region return 0
@@ -77,7 +78,7 @@ implements Partitioner {
// here if a region splits while mapping
region = locator.getRegionLocation(key.get()).getRegionInfo().getStartKey();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
for (int i = 0; i < this.startKeys.length; i++){
if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){
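The LOG.error(e) rewrites above are not just stylistic. commons-logging's Log.error(Object) accepts a bare exception, but SLF4J's Logger has no such overload: its error/warn/info methods take a String message first, with an optional Throwable last so the binding can still print the stack trace. A self-contained sketch of the replacement form (the exception and class names are illustrative):

    import java.io.IOException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative only; mirrors the pattern used throughout this patch.
    class ErrorLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(ErrorLoggingExample.class);

      void run() {
        try {
          throw new IOException("boom");
        } catch (IOException e) {
          // commons-logging allowed LOG.error(e); SLF4J needs a String message,
          // so the patch passes e.toString() and e again to keep the stack trace.
          LOG.error(e.toString(), e);
        }
      }
    }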
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
index 3460fe7df8..ba1df4c3a8 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.mapred;
import java.io.IOException;
import java.util.Iterator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.mapred.MapReduceBase;
@@ -38,8 +38,8 @@ public class IdentityTableReduce
extends MapReduceBase
implements TableReduce {
@SuppressWarnings("unused")
- private static final Log LOG =
- LogFactory.getLog(IdentityTableReduce.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(IdentityTableReduce.class.getName());
/**
* No aggregation, output pairs of (key, record)
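IdentityTableReduce keeps its .getName() call, which still works because LoggerFactory.getLogger is overloaded for both Class and String arguments and the resulting logger name is the same either way. A tiny sketch (class name illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Both fields resolve to a logger named after the class; illustrative only.
    class Demo {
      private static final Logger BY_CLASS = LoggerFactory.getLogger(Demo.class);
      private static final Logger BY_NAME = LoggerFactory.getLogger(Demo.class.getName());
    }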
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
index cbd72362ee..d9bb66bdf0 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.mapred;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
@@ -39,7 +39,7 @@ import org.apache.hadoop.util.StringUtils;
@InterfaceAudience.Public
public class TableInputFormat extends TableInputFormatBase implements
JobConfigurable {
- private static final Log LOG = LogFactory.getLog(TableInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableInputFormat.class);
/**
* space delimited list of columns
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
index 48ee763cf2..509972e92a 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.mapred;
import java.io.Closeable;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
@@ -79,7 +79,7 @@ import org.apache.hadoop.mapred.Reporter;
@InterfaceAudience.Public
public abstract class TableInputFormatBase
implements InputFormat {
- private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableInputFormatBase.class);
private byte [][] inputColumns;
private Table table;
private RegionLocator regionLocator;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
index 95be24f195..a49d0ec5c3 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
@@ -19,10 +19,9 @@
package org.apache.hadoop.hbase.mapred;
import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
@@ -43,7 +42,7 @@ import static org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl.LOG_PER_RO
*/
@InterfaceAudience.Public
public class TableRecordReaderImpl {
- private static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableRecordReaderImpl.class);
private byte [] startRow;
private byte [] endRow;
@@ -248,7 +247,7 @@ public class TableRecordReaderImpl {
long now = System.currentTimeMillis();
LOG.info("Mapper took " + (now-timestamp)
+ "ms to process " + rowcount + " rows");
- LOG.info(ioe);
+ LOG.info(ioe.toString(), ioe);
String lastRow = lastSuccessfulRow == null ?
"null" : Bytes.toStringBinary(lastSuccessfulRow);
LOG.info("lastSuccessfulRow=" + lastRow);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
index 48cc0d5a07..6d6125f033 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
@@ -72,8 +72,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
*/
@InterfaceAudience.Public
public class CellCounter extends Configured implements Tool {
- private static final Log LOG =
- LogFactory.getLog(CellCounter.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(CellCounter.class.getName());
/**
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
index 81af16580c..2e9e62cf37 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
@@ -23,8 +23,6 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -49,7 +49,7 @@ import org.apache.hadoop.util.ToolRunner;
*/
@InterfaceAudience.Public
public class CopyTable extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(CopyTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CopyTable.class);
final static String NAME = "copytable";
long startTime = 0;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
index 775739faa8..07f05dd798 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
@@ -26,12 +26,12 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.Tag;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
@@ -50,7 +50,8 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@InterfaceAudience.Private
public class DefaultVisibilityExpressionResolver implements VisibilityExpressionResolver {
- private static final Log LOG = LogFactory.getLog(DefaultVisibilityExpressionResolver.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(DefaultVisibilityExpressionResolver.class);
private Configuration conf;
private final Map labels = new HashMap<>();
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
index 7107537223..34f33983bd 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
@@ -21,14 +21,15 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
@@ -48,7 +49,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
*/
@InterfaceAudience.Private
public final class ExportUtils {
- private static final Log LOG = LogFactory.getLog(ExportUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExportUtils.class);
public static final String RAW_SCAN = "hbase.mapreduce.include.deleted.rows";
public static final String EXPORT_BATCHING = "hbase.export.scanner.batch";
public static final String EXPORT_CACHING = "hbase.export.scanner.caching";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index ffe1c85b1d..9bd0530226 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -40,8 +40,6 @@ import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -91,6 +89,8 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -105,7 +105,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Public
public class HFileOutputFormat2
extends FileOutputFormat {
- private static final Log LOG = LogFactory.getLog(HFileOutputFormat2.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileOutputFormat2.class);
static class TableInfo {
private TableDescriptor tableDesctiptor;
private RegionLocator regionLocator;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
index d8c2314b22..b48ecf02a0 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
@@ -19,10 +19,9 @@
package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -53,7 +52,7 @@ public class HRegionPartitioner
extends Partitioner
implements Configurable {
- private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegionPartitioner.class);
private Configuration conf = null;
// Connection and locator are not cleaned up; they just die when partitioner is done.
private Connection connection;
@@ -86,7 +85,7 @@ implements Configurable {
// here if a region splits while mapping
region = this.locator.getRegionLocation(key.get()).getRegionInfo().getStartKey();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
for (int i = 0; i < this.startKeys.length; i++){
if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){
@@ -129,12 +128,12 @@ implements Configurable {
TableName tableName = TableName.valueOf(conf.get(TableOutputFormat.OUTPUT_TABLE));
this.locator = this.connection.getRegionLocator(tableName);
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
try {
this.startKeys = this.locator.getStartKeys();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
}
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
index 2c8caf503a..e68ac3b354 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
@@ -27,8 +27,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Properties;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -56,14 +54,15 @@ import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Charsets;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Ordering;
public class HashTable extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(HashTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HashTable.class);
private static final int DEFAULT_BATCH_SIZE = 8000;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
index 76c1f607be..876953c862 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.io.Writable;
@@ -53,7 +53,7 @@ public class IdentityTableReducer
extends TableReducer {
@SuppressWarnings("unused")
- private static final Log LOG = LogFactory.getLog(IdentityTableReducer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IdentityTableReducer.class);
/**
* Writes each given record, consisting of the row key and the given values,
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
index bf0081b28a..e8e1de0093 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
@@ -33,8 +33,6 @@ import java.util.Map;
import java.util.TreeMap;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -79,6 +77,8 @@ import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -86,7 +86,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Public
public class Import extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(Import.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Import.class);
final static String NAME = "import";
public final static String CF_RENAME_PROP = "HBASE_IMPORTER_RENAME_CFS";
public final static String BULK_OUTPUT_CONF_KEY = "import.bulk.output";
@@ -192,7 +192,7 @@ public class Import extends Configured implements Tool {
extends TableMapper {
private Map cfRenameMap;
private Filter filter;
- private static final Log LOG = LogFactory.getLog(CellImporter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CellImporter.class);
/**
* @param row The current table row key.
@@ -256,7 +256,7 @@ public class Import extends Configured implements Tool {
public static class CellImporter extends TableMapper {
private Map cfRenameMap;
private Filter filter;
- private static final Log LOG = LogFactory.getLog(CellImporter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CellImporter.class);
/**
* @param row The current table row key.
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
index d672803b4b..678377d582 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
@@ -27,8 +27,6 @@ import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -77,7 +77,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@InterfaceAudience.Public
public class ImportTsv extends Configured implements Tool {
- protected static final Log LOG = LogFactory.getLog(ImportTsv.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(ImportTsv.class);
final static String NAME = "importtsv";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
index b7e9479973..03834f2b8e 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Public
@VisibleForTesting
public class MultiTableHFileOutputFormat extends HFileOutputFormat2 {
- private static final Log LOG = LogFactory.getLog(MultiTableHFileOutputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiTableHFileOutputFormat.class);
/**
* Creates a composite key to use as a mapper output key when using
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 82a86b4047..d8205c1a0c 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -22,9 +22,9 @@ import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
@@ -55,7 +55,7 @@ import java.util.Iterator;
public abstract class MultiTableInputFormatBase extends
InputFormat {
- private static final Log LOG = LogFactory.getLog(MultiTableInputFormatBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiTableInputFormatBase.class);
/** Holds the set of scans used to define the input. */
private List scans;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
index 4cf50f23f1..2a4fae9440 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
@@ -22,9 +22,9 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
@@ -72,7 +72,7 @@ public class MultiTableOutputFormat extends OutputFormat {
- private static final Log LOG = LogFactory.getLog(MultiTableRecordWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiTableRecordWriter.class);
Connection connection;
Map mutatorMap = new HashMap<>();
Configuration conf;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java
index b5cba645a8..97fafce99a 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java
@@ -18,28 +18,29 @@
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.io.IOException;
+import java.util.AbstractMap;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.util.ConfigurationUtil;
import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.AbstractMap;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
/**
* Shared implementation of mapreduce code over multiple table snapshots.
@@ -50,8 +51,8 @@ import java.util.UUID;
@InterfaceAudience.LimitedPrivate({ "HBase" })
@InterfaceStability.Evolving
public class MultiTableSnapshotInputFormatImpl {
-
- private static final Log LOG = LogFactory.getLog(MultiTableSnapshotInputFormatImpl.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(MultiTableSnapshotInputFormatImpl.class);
public static final String RESTORE_DIRS_KEY =
"hbase.MultiTableSnapshotInputFormat.restore.snapshotDirMapping";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
index a505379664..626deffda4 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
@@ -23,8 +23,6 @@ import java.lang.reflect.Method;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -41,6 +39,8 @@ import org.apache.hadoop.mapreduce.StatusReporter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -58,7 +58,7 @@ import org.apache.hadoop.util.ReflectionUtils;
*/
public class MultithreadedTableMapper extends TableMapper {
- private static final Log LOG = LogFactory.getLog(MultithreadedTableMapper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultithreadedTableMapper.class);
private Class extends Mapper> mapClass;
private Context outer;
private ExecutorService executor;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java
index 7da2f9b3b3..317b328df7 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java
@@ -23,9 +23,9 @@ import java.util.List;
import java.util.Map.Entry;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@@ -39,7 +39,7 @@ import org.apache.hadoop.mapreduce.Reducer;
*/
@InterfaceAudience.Public
public class PutCombiner extends Reducer {
- private static final Log LOG = LogFactory.getLog(PutCombiner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PutCombiner.class);
@Override
protected void reduce(K row, Iterable vals, Context context)
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java
index 33f09cfe00..d1c5fc207f 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java
@@ -24,17 +24,18 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.RegionLoad;
import org.apache.hadoop.hbase.ServerName;
-import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
/**
* Computes size of each region for given table and given column families.
@@ -43,7 +44,7 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public class RegionSizeCalculator {
- private static final Log LOG = LogFactory.getLog(RegionSizeCalculator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionSizeCalculator.class);
/**
* Maps each region to its size in bytes.
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
index c9f3022bd3..dac1d425d8 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
@@ -25,13 +25,13 @@ import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
@@ -42,7 +42,7 @@ import org.apache.hadoop.io.serializer.Serializer;
@InterfaceAudience.Public
public class ResultSerialization extends Configured implements Serialization {
- private static final Log LOG = LogFactory.getLog(ResultSerialization.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ResultSerialization.class);
// The following configuration property indicates import file format version.
public static final String IMPORT_FORMAT_VER = "hbase.import.version";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
index ea89c928ff..9c7b489181 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
@@ -22,11 +22,11 @@ import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -50,7 +50,7 @@ import org.apache.hadoop.util.ToolRunner;
@InterfaceAudience.Public
public class RowCounter extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(RowCounter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RowCounter.class);
/** Name of this 'program'. */
static final String NAME = "rowcounter";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java
index ad65e49bc3..1c31eda294 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java
@@ -18,9 +18,9 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -46,7 +46,7 @@ import org.apache.hadoop.mapreduce.Partitioner;
@InterfaceAudience.Public
public class SimpleTotalOrderPartitioner extends Partitioner
implements Configurable {
- private final static Log LOG = LogFactory.getLog(SimpleTotalOrderPartitioner.class);
+ private final static Logger LOG = LoggerFactory.getLogger(SimpleTotalOrderPartitioner.class);
@Deprecated
public static final String START = "hbase.simpletotalorder.start";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
index eff1596336..b55990c6b5 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
@@ -21,8 +21,6 @@ import java.io.IOException;
import java.util.Iterator;
import java.util.Collections;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
@@ -50,12 +48,13 @@ import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
public class SyncTable extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(SyncTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SyncTable.class);
static final String SOURCE_HASH_DIR_CONF_KEY = "sync.table.source.hash.dir";
static final String SOURCE_TABLE_CONF_KEY = "sync.table.source.table.name";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
index 9eefac9def..480c6118b0 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
@@ -23,13 +23,13 @@ import java.util.Collections;
import java.util.List;
import java.util.Locale;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
@@ -49,7 +49,7 @@ public class TableInputFormat extends TableInputFormatBase
implements Configurable {
@SuppressWarnings("hiding")
- private static final Log LOG = LogFactory.getLog(TableInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableInputFormat.class);
/** Job parameter that specifies the input table. */
public static final String INPUT_TABLE = "hbase.mapreduce.inputtable";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index fa2e6a2f81..5acbe2c2c1 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -28,9 +28,9 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
@@ -109,7 +109,7 @@ import org.apache.hadoop.util.StringUtils;
public abstract class TableInputFormatBase
extends InputFormat {
- private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableInputFormatBase.class);
private static final String NOT_INITIALIZED = "The input format instance has not been properly " +
"initialized. Ensure you call initializeTable either in your constructor or initialize " +
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index dc3bb61a64..2856a7d608 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -33,8 +33,6 @@ import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
@@ -70,7 +70,7 @@ import com.codahale.metrics.MetricRegistry;
@SuppressWarnings({ "rawtypes", "unchecked" })
@InterfaceAudience.Public
public class TableMapReduceUtil {
- private static final Log LOG = LogFactory.getLog(TableMapReduceUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableMapReduceUtil.class);
/**
* Use this before submitting a TableMap job. It will appropriately set up
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
index 07a2a08cc1..7598520a48 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
@@ -19,10 +19,9 @@
package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -52,7 +51,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
public class TableOutputFormat extends OutputFormat
implements Configurable {
- private static final Log LOG = LogFactory.getLog(TableOutputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableOutputFormat.class);
/** Job parameter that specifies the output table. */
public static final String OUTPUT_TABLE = "hbase.mapred.outputtable";
@@ -232,7 +231,7 @@ implements Configurable {
this.conf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, zkClientPort);
}
} catch(IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
throw new RuntimeException(e);
}
}
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
index 511994b50d..40c0e7c627 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
@@ -20,10 +20,9 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
@@ -50,7 +49,7 @@ public class TableRecordReaderImpl {
public static final String LOG_PER_ROW_COUNT
= "hbase.mapreduce.log.scanner.rowcount";
- private static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableRecordReaderImpl.class);
// HBASE_COUNTER_GROUP_NAME is the name of mapreduce counter group for HBase
@VisibleForTesting
@@ -254,7 +253,7 @@ public class TableRecordReaderImpl {
long now = System.currentTimeMillis();
LOG.info("Mapper took " + (now-timestamp)
+ "ms to process " + rowcount + " rows");
- LOG.info(ioe);
+ LOG.info(ioe.toString(), ioe);
String lastRow = lastSuccessfulRow == null ?
"null" : Bytes.toStringBinary(lastSuccessfulRow);
LOG.info("lastSuccessfulRow=" + lastRow);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
index 53eb9f40dc..c447e332fc 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
@@ -18,10 +18,14 @@
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.hadoop.hbase.client.TableDescriptor;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -29,16 +33,12 @@ import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HDFSBlocksDistribution.HostAndWeight;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.PrivateCellUtil;
-import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.ClientSideRegionScanner;
import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
@@ -47,14 +47,15 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.RegionSplitter;
import org.apache.hadoop.io.Writable;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.UUID;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
/**
* Hadoop MR API-agnostic implementation for mapreduce over table snapshots.
@@ -64,7 +65,7 @@ public class TableSnapshotInputFormatImpl {
// TODO: Snapshots files are owned in fs by the hbase user. There is no
// easy way to delegate access.
- public static final Log LOG = LogFactory.getLog(TableSnapshotInputFormatImpl.class);
+ public static final Logger LOG = LoggerFactory.getLogger(TableSnapshotInputFormatImpl.class);
private static final String SNAPSHOT_NAME_KEY = "hbase.TableSnapshotInputFormat.snapshot.name";
// key for specifying the root dir of the restored snapshot
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
index 19614afbad..de42c31678 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
@@ -23,9 +23,9 @@ import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Scan;
@@ -43,7 +43,7 @@ public class TableSplit extends InputSplit
implements Writable, Comparable {
/** @deprecated LOG variable would be made private. fix in hbase 3.0 */
@Deprecated
- public static final Log LOG = LogFactory.getLog(TableSplit.class);
+ public static final Logger LOG = LoggerFactory.getLogger(TableSplit.class);
// should be < 0 (@see #readFields(DataInput))
// version 1 supports Scan data member
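TableSplit differs from most files in this patch: its LOG field is public (and already flagged @Deprecated above), so changing the field's type from Log to Logger is a source-incompatible change for any downstream code that referenced it through the commons-logging type. A hypothetical downstream caller that would stop compiling after this patch:

    // Hypothetical downstream code, not part of HBase.
    import org.apache.commons.logging.Log;
    import org.apache.hadoop.hbase.mapreduce.TableSplit;

    class DownstreamUser {
      void use() {
        // Compiles only against pre-patch HBase, where TableSplit.LOG was a
        // commons-logging Log; after this patch the field is an org.slf4j.Logger.
        Log log = TableSplit.LOG;
        log.info("only valid against the old field type");
      }
    }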
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
index 796acb925f..1815412721 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
@@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -35,6 +33,8 @@ import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL;
@@ -55,7 +55,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Public
public class WALInputFormat extends InputFormat {
- private static final Log LOG = LogFactory.getLog(WALInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALInputFormat.class);
public static final String START_TIME_KEY = "wal.start.time";
public static final String END_TIME_KEY = "wal.end.time";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
index 15c33cbe7e..7212d4be42 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
@@ -23,8 +23,6 @@ import java.text.SimpleDateFormat;
import java.util.Map;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
@@ -52,6 +50,8 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A tool to replay WAL files as a M/R job.
@@ -65,7 +65,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Public
public class WALPlayer extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(WALPlayer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALPlayer.class);
final static String NAME = "WALPlayer";
public final static String BULK_OUTPUT_CONF_KEY = "wal.bulk.output";
public final static String TABLES_KEY = "wal.input.tables";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
index b8de9ec088..01df2bd6d3 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.mapreduce.replication;
import java.io.IOException;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -64,7 +62,8 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -79,8 +78,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
public class VerifyReplication extends Configured implements Tool {
- private static final Log LOG =
- LogFactory.getLog(VerifyReplication.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(VerifyReplication.class);
public final static String NAME = "verifyrep";
private final static String PEER_CONFIG_PREFIX = NAME + ".peer.";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
index aec5fa08b9..746bb5ff82 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
@@ -27,8 +27,6 @@ import java.util.List;
import java.util.Optional;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -63,6 +61,8 @@ import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/*
* The CompactionTool allows to execute a compaction specifying a:
@@ -74,7 +74,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class CompactionTool extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(CompactionTool.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactionTool.class);
private final static String CONF_TMP_DIR = "hbase.tmp.dir";
private final static String CONF_COMPACT_ONCE = "hbase.compactiontool.compact.once";
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 462d6bc37a..66e9e3bfc9 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -32,8 +32,6 @@ import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -73,7 +71,8 @@ import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -93,7 +92,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
/** Configuration prefix for overrides for the destination filesystem */
public static final String CONF_DEST_PREFIX = NAME + ".to.";
- private static final Log LOG = LogFactory.getLog(ExportSnapshot.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);
private static final String MR_NUM_MAPS = "mapreduce.job.maps";
private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";
@@ -153,7 +152,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
private static class ExportMapper extends Mapper<BytesWritable, NullWritable, NullWritable, NullWritable> {
- private static final Log LOG = LogFactory.getLog(ExportMapper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);
final static int REPORT_SIZE = 1 * 1024 * 1024;
final static int BUFFER_SIZE = 64 * 1024;
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index ef2d6d0c00..56be28816d 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -47,8 +47,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -106,7 +104,8 @@ import org.apache.htrace.core.ProbabilitySampler;
import org.apache.htrace.core.Sampler;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -131,7 +130,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
public class PerformanceEvaluation extends Configured implements Tool {
static final String RANDOM_SEEK_SCAN = "randomSeekScan";
static final String RANDOM_READ = "randomRead";
- private static final Log LOG = LogFactory.getLog(PerformanceEvaluation.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
private static final ObjectMapper MAPPER = new ObjectMapper();
static {
MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
@@ -361,7 +360,8 @@ public class PerformanceEvaluation extends Configured implements Tool {
.add("desc", desc)
.add("presplit", opts.presplitRegions)
.add("splitPolicy", opts.splitPolicy)
- .add("replicas", opts.replicas));
+ .add("replicas", opts.replicas)
+ .toString());
}
// remove an existing table
@@ -1989,7 +1989,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
}
static class FilteredScanTest extends TableTest {
- protected static final Log LOG = LogFactory.getLog(FilteredScanTest.class.getName());
+ protected static final Logger LOG = LoggerFactory.getLogger(FilteredScanTest.class.getName());
FilteredScanTest(Connection con, TestOptions options, Status status) {
super(con, options, status);
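
Note: besides the logger declarations, PerformanceEvaluation needs the explicit .toString() added above because commons-logging's info(Object) rendered any argument implicitly, while SLF4J's info(...) takes a String message. A sketch of the corrected call, assuming the shaded Guava MoreObjects helper and made-up option names:

    import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class OptionsLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(OptionsLoggingExample.class);

      void logOptions(int presplitRegions, int replicas) {
        // The ToStringHelper must be rendered explicitly now that the message is a String.
        LOG.info(MoreObjects.toStringHelper("TestOptions")
            .add("presplit", presplitRegions)
            .add("replicas", replicas)
            .toString());
      }
    }
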
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java
index 665c547019..1b137792fb 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java
@@ -18,9 +18,12 @@
package org.apache.hadoop.hbase.mapred;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Result;
@@ -36,18 +39,17 @@ import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-
-import static org.junit.Assert.assertTrue;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category({ VerySlowMapReduceTests.class, LargeTests.class })
public class TestMultiTableSnapshotInputFormat
extends org.apache.hadoop.hbase.mapreduce.TestMultiTableSnapshotInputFormat {
- private static final Log LOG = LogFactory.getLog(TestMultiTableSnapshotInputFormat.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestMultiTableSnapshotInputFormat.class);
@Override
protected void runJob(String jobName, Configuration c, List<Scan> scans)
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
index ace2ffab40..369f1c1174 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
@@ -32,8 +32,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.Cell;
@@ -68,6 +66,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This tests the TableInputFormat and its recovery semantics
@@ -75,7 +75,7 @@ import org.mockito.stubbing.Answer;
@Category({MapReduceTests.class, LargeTests.class})
public class TestTableInputFormat {
- private static final Log LOG = LogFactory.getLog(TestTableInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormat.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
index 3f905cffea..d300e7d745 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -41,6 +39,8 @@ import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test Map/Reduce job over HBase tables. The map/reduce process we're testing
@@ -50,10 +50,10 @@ import org.junit.experimental.categories.Category;
@Category({MapReduceTests.class, LargeTests.class})
@SuppressWarnings("deprecation")
public class TestTableMapReduce extends TestTableMapReduceBase {
- private static final Log LOG =
- LogFactory.getLog(TestTableMapReduce.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestTableMapReduce.class.getName());
- protected Log getLog() { return LOG; }
+ protected Logger getLog() { return LOG; }
/**
* Pass the given key and processed record reduce
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
index ac2f20d895..4a60110648 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
@@ -28,8 +28,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -53,15 +51,16 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
@Category({MapReduceTests.class, LargeTests.class})
public class TestTableMapReduceUtil {
- private static final Log LOG = LogFactory
- .getLog(TestTableMapReduceUtil.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TestTableMapReduceUtil.class);
private static Table presidentsTable;
private static final String TABLE_NAME = "People";
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java
index 835117c020..785380f5a0 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.mapred;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -30,6 +28,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
@@ -44,8 +44,8 @@ import static org.junit.Assert.fail;
@Category(MediumTests.class)
public class TestTableOutputFormatConnectionExhaust {
- private static final Log LOG =
- LogFactory.getLog(TestTableOutputFormatConnectionExhaust.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestTableOutputFormatConnectionExhaust.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
static final String TABLE = "TestTableOutputFormatConnectionExhaust";
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
index c717fa96a9..b8d03bac52 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
@@ -18,9 +18,17 @@
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.NavigableMap;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
@@ -42,17 +50,10 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.NavigableMap;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
* Base set of tests and setup for input formats touching multiple tables.
@@ -60,7 +61,7 @@ import static org.junit.Assert.assertTrue;
public abstract class MultiTableInputFormatTestBase {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- static final Log LOG = LogFactory.getLog(TestMultiTableInputFormat.class);
+ static final Logger LOG = LoggerFactory.getLogger(TestMultiTableInputFormat.class);
public static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
static final String TABLE_NAME = "scantest";
static final byte[] INPUT_FAMILY = Bytes.toBytes("contents");
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
index 4e11275b23..d753d40add 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -44,6 +42,8 @@ import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertFalse;
@@ -51,7 +51,7 @@ import java.io.IOException;
import java.util.Arrays;
public abstract class TableSnapshotInputFormatTestBase {
- private static final Log LOG = LogFactory.getLog(TableSnapshotInputFormatTestBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableSnapshotInputFormatTestBase.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
protected final HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index aa2984f7f9..09226f606e 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -40,8 +40,6 @@ import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -110,6 +108,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Simple test for {@link HFileOutputFormat2}.
@@ -131,7 +131,7 @@ public class TestHFileOutputFormat2 {
private HBaseTestingUtility util = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestHFileOutputFormat2.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHFileOutputFormat2.class);
/**
* Simple mapper that makes KeyValue output.
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
index 87e7852b63..6b3c71ce2b 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertEquals;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -45,6 +43,8 @@ import org.junit.experimental.categories.Category;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Basic test for the HashTable M/R tool
@@ -52,7 +52,7 @@ import org.junit.rules.TestName;
@Category(LargeTests.class)
public class TestHashTable {
- private static final Log LOG = LogFactory.getLog(TestHashTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHashTable.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 2d8a2bd75b..aec9566538 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -35,8 +35,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -88,6 +86,8 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests the table import and table export MR job functionality
@@ -95,7 +95,7 @@ import org.mockito.stubbing.Answer;
@Category({VerySlowMapReduceTests.class, MediumTests.class})
public class TestImportExport {
- private static final Log LOG = LogFactory.getLog(TestImportExport.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestImportExport.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1");
private static final byte[] ROW2 = Bytes.toBytesBinary("\\x32row2");
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
index a47bef1dcd..5d4c8a3ad7 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
@@ -27,8 +27,6 @@ import java.util.List;
import java.util.Optional;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -66,12 +64,15 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MapReduceTests.class, LargeTests.class})
public class TestImportTSVWithOperationAttributes implements Configurable {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestImportTSVWithOperationAttributes.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestImportTSVWithOperationAttributes.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
@@ -95,10 +96,12 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
@Rule
public TestName name = new TestName();
+ @Override
public Configuration getConf() {
return util.getConfiguration();
}
+ @Override
public void setConf(Configuration conf) {
throw new IllegalArgumentException("setConf not supported");
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
index f121f20a0f..9ddbc65dda 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
@@ -26,8 +26,6 @@ import java.util.List;
import java.util.Optional;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -54,11 +52,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MapReduceTests.class, LargeTests.class})
public class TestImportTSVWithTTLs implements Configurable {
- protected static final Log LOG = LogFactory.getLog(TestImportTSVWithTTLs.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(TestImportTSVWithTTLs.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
index 469284733f..8d3f3df106 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
@@ -29,8 +29,6 @@ import java.util.List;
import java.util.Set;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -74,11 +72,14 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MapReduceTests.class, LargeTests.class})
public class TestImportTSVWithVisibilityLabels implements Configurable {
- private static final Log LOG = LogFactory.getLog(TestImportTSVWithVisibilityLabels.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestImportTSVWithVisibilityLabels.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index f6fcfa38ec..9484a94a39 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -32,8 +32,6 @@ import java.util.Map;
import java.util.Set;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -71,11 +69,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({VerySlowMapReduceTests.class, LargeTests.class})
public class TestImportTsv implements Configurable {
- private static final Log LOG = LogFactory.getLog(TestImportTsv.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestImportTsv.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
index 8187b73aa4..7eeee707ea 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
@@ -18,11 +18,11 @@
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -48,7 +48,7 @@ public class TestJarFinder {
public void testJar() throws Exception {
//picking a class that is for sure in a JAR in the classpath
- String jar = JarFinder.getJar(LogFactory.class);
+ String jar = JarFinder.getJar(LoggerFactory.class);
Assert.assertTrue(new File(jar).exists());
}
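
Note: TestJarFinder only needs a class that is guaranteed to be loaded from a jar on the classpath; with commons-logging removed from the dependency tree, slf4j-api's LoggerFactory plays that role. A small sketch of the check, assuming JarFinder sits in the same org.apache.hadoop.hbase.mapreduce package the test exercises:

    import java.io.File;
    import org.apache.hadoop.hbase.mapreduce.JarFinder;
    import org.slf4j.LoggerFactory;

    class JarFinderExample {
      static boolean slf4jIsPackagedInAJar() throws Exception {
        // getJar resolves the jar file that the given class was loaded from.
        String jar = JarFinder.getJar(LoggerFactory.class);
        return new File(jar).exists();
      }
    }
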
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java
index 530d9c57e3..354f5e77ec 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java
@@ -18,10 +18,11 @@
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimaps;
-import edu.umd.cs.findbugs.annotations.Nullable;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Scan;
@@ -36,10 +37,11 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.experimental.categories.Category;
-import java.io.IOException;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimaps;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
@Category({ VerySlowMapReduceTests.class, LargeTests.class })
public class TestMultiTableSnapshotInputFormat extends MultiTableInputFormatTestBase {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
index 694a359a5a..357f3750ef 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
@@ -26,8 +26,6 @@ import java.util.Iterator;
import java.util.Map;
import java.util.NavigableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
@@ -50,6 +48,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test Map/Reduce job over HBase tables. The map/reduce process we're testing
@@ -58,7 +58,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({MapReduceTests.class, LargeTests.class})
public class TestMultithreadedTableMapper {
- private static final Log LOG = LogFactory.getLog(TestMultithreadedTableMapper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMultithreadedTableMapper.class);
private static final HBaseTestingUtility UTIL =
new HBaseTestingUtility();
static final TableName MULTI_REGION_TABLE_NAME = TableName.valueOf("mrtest");
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
index 3b84e2d2d3..aba1714753 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
@@ -27,8 +27,6 @@ import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -46,6 +44,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the rowcounter map reduce job.
@@ -54,7 +54,7 @@ import org.junit.rules.TestRule;
public class TestRowCounter {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestRowCounter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRowCounter.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static String TABLE_NAME = "testRowCounter";
private final static String TABLE_NAME_TS_RANGE = "testRowCounter_ts_range";
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java
index 1e940d46cf..e2a04241a5 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -48,7 +46,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
/**
@@ -58,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
public class TestSyncTable {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestSyncTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSyncTable.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
index db50899cfb..5453054373 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
@@ -32,8 +32,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -64,6 +62,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This tests the TableInputFormat and its recovery semantics
@@ -72,7 +72,7 @@ import org.mockito.stubbing.Answer;
@Category(LargeTests.class)
public class TestTableInputFormat {
- private static final Log LOG = LogFactory.getLog(TestTableInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormat.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static MiniMRCluster mrCluster;
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
index d127adb22f..3d97071379 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.*;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -47,6 +45,8 @@ import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -62,7 +62,7 @@ import org.junit.BeforeClass;
*/
public abstract class TestTableInputFormatScanBase {
- private static final Log LOG = LogFactory.getLog(TestTableInputFormatScanBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormatScanBase.class);
static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
static final TableName TABLE_NAME = TableName.valueOf("scantest");
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
index d702e0d3c9..9c38a0ddf3 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
@@ -27,8 +27,6 @@ import java.io.IOException;
import java.util.Map;
import java.util.NavigableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
@@ -49,6 +47,8 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test Map/Reduce job over HBase tables. The map/reduce process we're testing
@@ -58,10 +58,10 @@ import org.junit.experimental.categories.Category;
@Category({VerySlowMapReduceTests.class, LargeTests.class})
public class TestTableMapReduce extends TestTableMapReduceBase {
- private static final Log LOG = LogFactory.getLog(TestTableMapReduce.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableMapReduce.class);
@Override
- protected Log getLog() { return LOG; }
+ protected Logger getLog() { return LOG; }
/**
* Pass the given key and processed record reduce
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
index 27bf0637ec..60e2622856 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
@@ -26,7 +26,6 @@ import java.util.Iterator;
import java.util.Map;
import java.util.NavigableMap;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
@@ -46,6 +45,7 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
/**
* A base class for a test Map/Reduce job over HBase tables. The map/reduce process we're testing
@@ -70,7 +70,7 @@ public abstract class TestTableMapReduceBase {
/**
* Retrieve my logger instance.
*/
- protected abstract Log getLog();
+ protected abstract Logger getLog();
/**
* Handles API-specifics for setting up and executing the job.
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
index 2ed6081e31..11b7657fc3 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
@@ -27,8 +27,6 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -58,7 +56,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import java.util.Arrays;
@@ -71,7 +70,7 @@ import org.apache.hadoop.hbase.util.RegionSplitter;
@Category({VerySlowMapReduceTests.class, LargeTests.class})
public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBase {
- private static final Log LOG = LogFactory.getLog(TestTableSnapshotInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableSnapshotInputFormat.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
index 6796c944ad..3f0c591f2f 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
@@ -51,6 +49,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
@@ -62,7 +62,7 @@ import java.util.TreeMap;
@Category({MapReduceTests.class, LargeTests.class})
public class TestTimeRangeMapRed {
- private final static Log log = LogFactory.getLog(TestTimeRangeMapRed.class);
+ private final static Logger log = LoggerFactory.getLogger(TestTimeRangeMapRed.class);
private static final HBaseTestingUtility UTIL =
new HBaseTestingUtility();
private Admin admin;
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
index 65a3421461..18bb1353ee 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
@@ -26,8 +26,6 @@ import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -57,13 +55,15 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* JUnit tests for the WALRecordReader
*/
@Category({MapReduceTests.class, MediumTests.class})
public class TestWALRecordReader {
- private static final Log LOG = LogFactory.getLog(TestWALRecordReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWALRecordReader.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Configuration conf;
private static FileSystem fs;
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
index 143f585dbd..8aefa4d169 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
@@ -30,8 +30,6 @@ import java.util.List;
import java.util.NavigableMap;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -84,14 +82,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
@Category({ReplicationTests.class, LargeTests.class})
public class TestReplicationSmallTests extends TestReplicationBase {
- private static final Log LOG = LogFactory.getLog(TestReplicationSmallTests.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationSmallTests.class);
private static final String PEER_ID = "2";
@Rule
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
index 98d6311b8f..8703ca0206 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
@@ -28,10 +28,9 @@ import java.net.URI;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
+import java.util.Objects;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -51,13 +50,13 @@ import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
-import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -68,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
public class TestExportSnapshot {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestExportSnapshot.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshot.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -350,7 +349,7 @@ public class TestExportSnapshot {
FileStatus[] list = FSUtils.listStatus(fs, dir);
if (list != null) {
for (FileStatus fstat: list) {
- LOG.debug(fstat.getPath());
+ LOG.debug(Objects.toString(fstat.getPath()));
if (fstat.isDirectory()) {
files.addAll(listFiles(fs, root, fstat.getPath()));
} else {
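
Note: the Objects.toString wrapper above exists for the same reason as the earlier .toString() changes — SLF4J's debug(...) wants a String where commons-logging accepted a raw Path. A sketch of two equivalent ways to log such a value, with a hypothetical helper class:

    import java.util.Objects;
    import org.apache.hadoop.fs.Path;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class PathLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(PathLoggingExample.class);

      void logPath(Path p) {
        // Objects.toString(null) yields "null" rather than throwing.
        LOG.debug(Objects.toString(p));
        // A parameterized message avoids the explicit conversion entirely.
        LOG.debug("listing {}", p);
      }
    }
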
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java
index 0077850234..f3d08ba52f 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.snapshot;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -37,6 +35,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test Export Snapshot Tool
@@ -45,7 +45,7 @@ import org.junit.rules.TestRule;
public class TestExportSnapshotNoCluster {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestExportSnapshotNoCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshotNoCluster.class);
protected final static HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
index 1d9b74e989..93a4798eaf 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
@@ -29,8 +29,6 @@ import java.util.concurrent.atomic.AtomicReference;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
@@ -67,7 +68,7 @@ import org.apache.hadoop.util.ToolRunner;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class LoadTestTool extends AbstractHBaseTool {
- private static final Log LOG = LogFactory.getLog(LoadTestTool.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LoadTestTool.class);
private static final String COLON = ":";
/** Table name for the test */
@@ -579,7 +580,7 @@ public class LoadTestTool extends AbstractHBaseTool {
try {
addAuthInfoToConf(authConfig, conf, superUser, userNames);
} catch (IOException exp) {
- LOG.error(exp);
+ LOG.error(exp.toString(), exp);
return EXIT_FAILURE;
}
userOwner = User.create(HBaseKerberosUtils.loginAndReturnUGI(conf, superUser));
@@ -609,7 +610,8 @@ public class LoadTestTool extends AbstractHBaseTool {
AccessControlClient.grant(ConnectionFactory.createConnection(conf),
tableName, userOwner.getShortName(), null, null, actions);
} catch (Throwable e) {
- LOG.fatal("Error in granting permission for the user " + userOwner.getShortName(), e);
+ LOG.error(HBaseMarkers.FATAL, "Error in granting permission for the user " +
+ userOwner.getShortName(), e);
return EXIT_FAILURE;
}
}
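
Note: LoadTestTool shows the two call-site adjustments the API switch forces. SLF4J defines no fatal level, so former LOG.fatal(...) calls go through error() with the HBaseMarkers.FATAL marker, and there is no error(Throwable) overload, so the exception is rendered into the message explicitly. A sketch with hypothetical names:

    import org.apache.hadoop.hbase.log.HBaseMarkers;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class FatalLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(FatalLoggingExample.class);

      void handleGrantFailure(Throwable e, String user) {
        // error(String, Throwable) replaces the old single-argument error(Object) call.
        LOG.error(e.toString(), e);
        // The FATAL marker stands in for the log level SLF4J does not define.
        LOG.error(HBaseMarkers.FATAL, "Error in granting permission for the user " + user, e);
      }
    }
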
diff --git a/hbase-metrics-api/pom.xml b/hbase-metrics-api/pom.xml
index 3baa33034f..5e138495ed 100644
--- a/hbase-metrics-api/pom.xml
+++ b/hbase-metrics-api/pom.xml
@@ -85,8 +85,8 @@
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
<groupId>org.apache.commons</groupId>
diff --git a/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java b/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java
index 0c29e22e37..d398c25707 100644
--- a/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java
+++ b/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java
@@ -25,16 +25,16 @@ import java.util.ArrayList;
import java.util.List;
import java.util.ServiceLoader;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
@InterfaceAudience.Private
public class MetricRegistriesLoader {
- private static final Log LOG = LogFactory.getLog(MetricRegistries.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricRegistries.class);
private static final String defaultClass
= "org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl";
diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml
index a764f2a1e1..c791002e72 100644
--- a/hbase-procedure/pom.xml
+++ b/hbase-procedure/pom.xml
@@ -86,8 +86,8 @@
<artifactId>hbase-shaded-miscellaneous</artifactId>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
<groupId>commons-cli</groupId>
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java
index 3e474513d8..fbfa5b2f10 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java
@@ -23,13 +23,13 @@ import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public abstract class AbstractProcedureScheduler implements ProcedureScheduler {
- private static final Log LOG = LogFactory.getLog(AbstractProcedureScheduler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractProcedureScheduler.class);
private final ReentrantLock schedulerLock = new ReentrantLock();
private final Condition schedWaitCond = schedulerLock.newCondition();
private boolean running = false;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index cbbd0e73d5..64c0233e7e 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -23,10 +23,10 @@ import java.util.Arrays;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.metrics.Counter;
import org.apache.hadoop.hbase.metrics.Histogram;
@@ -88,7 +88,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
@InterfaceStability.Evolving
public abstract class Procedure<TEnvironment> implements Comparable<Procedure<TEnvironment>> {
- private static final Log LOG = LogFactory.getLog(Procedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Procedure.class);
public static final long NO_PROC_ID = -1;
protected static final int NO_TIMEOUT = -1;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java
index 20803f453f..fb3d7edaa2 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java
@@ -18,10 +18,9 @@
package org.apache.hadoop.hbase.procedure2;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -30,7 +29,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class ProcedureEvent<T> {
- private static final Log LOG = LogFactory.getLog(ProcedureEvent.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureEvent.class);
private final T object;
private boolean ready = false;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index ac0487165e..982525b75d 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -40,13 +40,14 @@ import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.procedure2.Procedure.LockState;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
@@ -75,7 +76,7 @@ import org.apache.hadoop.hbase.util.Threads;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ProcedureExecutor<TEnvironment> {
- private static final Log LOG = LogFactory.getLog(ProcedureExecutor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureExecutor.class);
public static final String CHECK_OWNER_SET_CONF_KEY = "hbase.procedure.check.owner.set";
private static final boolean DEFAULT_CHECK_OWNER_SET = false;
@@ -160,7 +161,7 @@ public class ProcedureExecutor {
*/
private static class CompletedProcedureCleaner<TEnvironment>
extends ProcedureInMemoryChore<TEnvironment> {
- private static final Log LOG = LogFactory.getLog(CompletedProcedureCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompletedProcedureCleaner.class);
private static final String CLEANER_INTERVAL_CONF_KEY = "hbase.procedure.cleaner.interval";
private static final int DEFAULT_CLEANER_INTERVAL = 30 * 1000; // 30sec
@@ -1364,7 +1365,7 @@ public class ProcedureExecutor {
return LockState.LOCK_YIELD_WAIT;
} catch (Throwable e) {
// Catch NullPointerExceptions or similar errors...
- LOG.fatal("CODE-BUG: Uncaught runtime exception for " + proc, e);
+ LOG.error(HBaseMarkers.FATAL, "CODE-BUG: Uncaught runtime exception for " + proc, e);
}
// allows to kill the executor before something is stored to the wal.
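Several hunks above and below replace LOG.fatal(...) with LOG.error(HBaseMarkers.FATAL, ...): SLF4J has no FATAL level, so the former severity is preserved with an org.slf4j.Marker. A sketch of what such a marker holder could look like (an assumption for illustration, not the actual HBase source, which ships elsewhere in the patch):

package org.apache.hadoop.hbase.log;

import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

// Minimal marker holder: LOG.error(HBaseMarkers.FATAL, msg, t) keeps the former
// fatal severity visible to appenders and log parsers that understand markers.
public final class HBaseMarkers {
  public static final Marker FATAL = MarkerFactory.getMarker("FATAL");

  private HBaseMarkers() {
  }
}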
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
index 4cee524a12..da4fc0599c 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
@@ -32,10 +32,10 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.procedure2.util.DelayedUtil;
import org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedContainerWithTimestamp;
import org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedWithTimeout;
@@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultima
*/
@InterfaceAudience.Private
public abstract class RemoteProcedureDispatcher<TEnv, TRemote extends Comparable<TRemote>> {
- private static final Log LOG = LogFactory.getLog(RemoteProcedureDispatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RemoteProcedureDispatcher.class);
public static final String THREAD_POOL_SIZE_CONF_KEY =
"hbase.procedure.remote.dispatcher.threadpool.size";
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java
index 1f928a42b5..46185eaae7 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java
@@ -23,10 +23,10 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
/**
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu
@InterfaceAudience.Private
@InterfaceStability.Evolving
class RootProcedureState {
- private static final Log LOG = LogFactory.getLog(RootProcedureState.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RootProcedureState.class);
private enum State {
RUNNING, // The Procedure is running or ready to run
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
index 893ee0cd57..ade07cc825 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
@@ -24,10 +24,10 @@ import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData;
/**
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMa
@InterfaceStability.Evolving
public abstract class StateMachineProcedure<TEnvironment, TState>
extends Procedure<TEnvironment> {
- private static final Log LOG = LogFactory.getLog(StateMachineProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StateMachineProcedure.class);
private static final int EOF_STATE = Integer.MIN_VALUE;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
index edfb3adef2..1e9ef6e78d 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java
@@ -20,14 +20,14 @@ package org.apache.hadoop.hbase.procedure2.store.wal;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader;
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ProcedureWALFile implements Comparable<ProcedureWALFile> {
- private static final Log LOG = LogFactory.getLog(ProcedureWALFile.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureWALFile.class);
private ProcedureWALHeader header;
private FSDataInputStream stream;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
index a34afe5afe..84edd0fbcf 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
@@ -25,12 +25,12 @@ import java.io.InputStream;
import java.io.OutputStream;
import java.util.Iterator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class ProcedureWALFormat {
- private static final Log LOG = LogFactory.getLog(ProcedureWALFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureWALFormat.class);
static final byte LOG_TYPE_STREAM = 0;
static final byte LOG_TYPE_COMPACTED = 1;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index 36d8270141..0e110884d8 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -22,11 +22,11 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ProcedureWALFormatReader {
- private static final Log LOG = LogFactory.getLog(ProcedureWALFormatReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureWALFormatReader.class);
// ==============================================================================================
// We read the WALs in reverse order from the newest to the oldest.
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java
index a3c7cbf423..521ae7e8ca 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java
@@ -37,8 +37,6 @@ import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.collections4.queue.CircularFifoQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
@@ -47,6 +45,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreBase;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker;
@@ -59,6 +58,8 @@ import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -67,7 +68,7 @@ import org.apache.yetus.audience.InterfaceStability;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class WALProcedureStore extends ProcedureStoreBase {
- private static final Log LOG = LogFactory.getLog(WALProcedureStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALProcedureStore.class);
public static final String LOG_PREFIX = "pv2-";
/** Used to construct the name of the log directory for master procedures */
public static final String MASTER_PROCEDURE_LOGDIR = "MasterProcWALs";
@@ -496,8 +497,8 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize one of the procedure: proc=" + proc +
- ", subprocs=" + Arrays.toString(subprocs), e);
+ LOG.error(HBaseMarkers.FATAL, "Unable to serialize one of the procedure: proc=" +
+ proc + ", subprocs=" + Arrays.toString(subprocs), e);
throw new RuntimeException(e);
} finally {
releaseSlot(slot);
@@ -525,7 +526,8 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize one of the procedure: " + Arrays.toString(procs), e);
+ LOG.error(HBaseMarkers.FATAL, "Unable to serialize one of the procedure: " +
+ Arrays.toString(procs), e);
throw new RuntimeException(e);
} finally {
releaseSlot(slot);
@@ -548,7 +550,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize the procedure: " + proc, e);
+ LOG.error(HBaseMarkers.FATAL, "Unable to serialize the procedure: " + proc, e);
throw new RuntimeException(e);
} finally {
releaseSlot(slot);
@@ -571,7 +573,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize the procedure: " + procId, e);
+ LOG.error(HBaseMarkers.FATAL, "Unable to serialize the procedure: " + procId, e);
throw new RuntimeException(e);
} finally {
releaseSlot(slot);
@@ -596,7 +598,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize the procedure: " + proc, e);
+ LOG.error(HBaseMarkers.FATAL, "Unable to serialize the procedure: " + proc, e);
throw new RuntimeException(e);
} finally {
releaseSlot(slot);
@@ -632,7 +634,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
} catch (IOException e) {
// We are not able to serialize the procedure.
// this is a code error, and we are not able to go on.
- LOG.fatal("Unable to serialize the procedures: " + Arrays.toString(procIds), e);
+ LOG.error("Unable to serialize the procedures: " + Arrays.toString(procIds), e);
throw new RuntimeException(e);
} finally {
releaseSlot(slot);
@@ -902,7 +904,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
LOG.warn("Unable to roll the log, attempt=" + (i + 1), e);
}
}
- LOG.fatal("Unable to roll the log");
+ LOG.error(HBaseMarkers.FATAL, "Unable to roll the log");
return false;
}
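Throughout WALProcedureStore the patch keeps the existing string concatenation inside the log calls and only swaps the API. SLF4J also supports {} placeholders, which defer message assembly until the level is actually enabled; a hedged sketch of the equivalent parameterized form (illustrative names, not part of the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ParameterizedLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

  public static void main(String[] args) {
    Object proc = "pid=42, state=RUNNABLE";
    Exception cause = new RuntimeException("serialization failed");
    // Equivalent to LOG.error("Unable to serialize the procedure: " + proc, cause);
    // a trailing Throwable after the {} arguments is still logged as the cause.
    LOG.error("Unable to serialize the procedure: {}", proc, cause);
    // The deferred formatting matters most for debug/trace calls on hot paths.
    LOG.debug("Rolled the log for {}", proc);
  }
}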
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
index 6e0c02eb2a..2558a31f08 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
@@ -27,8 +27,6 @@ import java.util.ArrayList;
import java.util.Set;
import java.util.concurrent.Callable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -44,9 +42,11 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.hadoop.hbase.util.Threads;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class ProcedureTestingUtility {
- private static final Log LOG = LogFactory.getLog(ProcedureTestingUtility.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureTestingUtility.class);
private ProcedureTestingUtility() {
}
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java
index 4c1611a6c5..79ce73617c 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -32,13 +30,15 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestChildProcedures {
- private static final Log LOG = LogFactory.getLog(TestChildProcedures.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestChildProcedures.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java
index d2b2b7d54d..9588d998da 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -37,12 +35,14 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureEvents {
- private static final Log LOG = LogFactory.getLog(TestProcedureEvents.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureEvents.class);
private TestProcEnv procEnv;
private ProcedureStore procStore;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
index ed6d512df7..ae781cd913 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
@@ -21,9 +21,8 @@ package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Objects;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -36,13 +35,15 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureExecution {
- private static final Log LOG = LogFactory.getLog(TestProcedureExecution.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureExecution.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
private static final Procedure NULL_PROC = null;
@@ -136,7 +137,7 @@ public class TestProcedureExecution {
// subProc1 has a "null" subprocedure which is catched as InvalidArgument
// failed state with 2 execute and 2 rollback
- LOG.info(state);
+ LOG.info(Objects.toString(state));
Procedure<?> result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
ProcedureTestingUtility.assertIsIllegalArgumentException(result);
@@ -157,7 +158,7 @@ public class TestProcedureExecution {
long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, rootProc);
// successful state, with 3 execute
- LOG.info(state);
+ LOG.info(Objects.toString(state));
Procedure<?> result = procExecutor.getResult(rootId);
ProcedureTestingUtility.assertProcNotFailed(result);
assertEquals(state.toString(), 3, state.size());
@@ -173,7 +174,7 @@ public class TestProcedureExecution {
long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, rootProc);
// the 3rd proc fail, rollback after 2 successful execution
- LOG.info(state);
+ LOG.info(Objects.toString(state));
Procedure<?> result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
LOG.info(result.getException().getMessage());
@@ -300,7 +301,7 @@ public class TestProcedureExecution {
long startTime = EnvironmentEdgeManager.currentTime();
long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, proc);
long execTime = EnvironmentEdgeManager.currentTime() - startTime;
- LOG.info(state);
+ LOG.info(Objects.toString(state));
assertTrue("we didn't wait enough execTime=" + execTime, execTime >= PROC_TIMEOUT_MSEC);
Procedure<?> result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
@@ -316,7 +317,7 @@ public class TestProcedureExecution {
Procedure proc = new TestWaitingProcedure("wproc", state, true);
proc.setTimeout(2500);
long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, proc);
- LOG.info(state);
+ LOG.info(Objects.toString(state));
Procedure<?> result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
ProcedureTestingUtility.assertIsTimeoutException(result);
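The Objects.toString wrapping in the hunks above exists because commons-logging's info(Object) accepted any object, while SLF4J's info(...) takes a String (or a format string). A small sketch of the pattern (names invented for illustration):

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class InfoObjectSketch {
  private static final Logger LOG = LoggerFactory.getLogger(InfoObjectSketch.class);

  public static void main(String[] args) {
    List<String> state = new ArrayList<>();
    state.add("execute-step1");
    state.add("rollback-step1");
    // LOG.info(state) compiled against commons-logging but not against SLF4J;
    // Objects.toString is a null-safe way to turn the object into a message.
    LOG.info(Objects.toString(state));
  }
}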
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java
index 289987be8b..29a0472a45 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.procedure2;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
@@ -36,12 +34,14 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureExecutor {
- private static final Log LOG = LogFactory.getLog(TestProcedureExecutor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureExecutor.class);
private TestProcEnv procEnv;
private NoopProcedureStore procStore;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java
index 50ccfa60f3..6546ea3c60 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -32,6 +30,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -39,7 +39,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureInMemoryChore {
- private static final Log LOG = LogFactory.getLog(TestProcedureInMemoryChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureInMemoryChore.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java
index 6246629ef9..0550a91abe 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.procedure2;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -30,6 +28,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
@@ -39,7 +39,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureMetrics {
- private static final Log LOG = LogFactory.getLog(TestProcedureMetrics.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureMetrics.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java
index 12a8012ef8..bebfae001a 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -39,6 +37,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -46,7 +46,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureNonce {
- private static final Log LOG = LogFactory.getLog(TestProcedureNonce.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureNonce.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 2;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
index 06f8833a58..8fe56fe00b 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
@@ -39,6 +37,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -46,7 +46,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureRecovery {
- private static final Log LOG = LogFactory.getLog(TestProcedureRecovery.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureRecovery.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
index 12b21847db..20d60ceb45 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -37,6 +35,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -44,7 +44,7 @@ import static org.junit.Assert.fail;
@Category({MasterTests.class, LargeTests.class})
public class TestProcedureReplayOrder {
- private static final Log LOG = LogFactory.getLog(TestProcedureReplayOrder.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureReplayOrder.class);
private static final int NUM_THREADS = 16;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java
index 1c8f1ebb66..6116736d3c 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.ConcurrentSkipListSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -33,6 +31,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -40,7 +40,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, MediumTests.class})
public class TestProcedureSchedulerConcurrency {
- private static final Log LOG = LogFactory.getLog(TestProcedureEvents.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureEvents.class);
private SimpleProcedureScheduler procSched;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java
index 3803abae27..1a426505ea 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java
@@ -25,8 +25,6 @@ import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
@@ -37,10 +35,12 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureSuspended {
- private static final Log LOG = LogFactory.getLog(TestProcedureSuspended.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureSuspended.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
private static final Procedure NULL_PROC = null;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java
index a61370c544..43d47f2286 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -35,13 +33,15 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestStateMachineProcedure {
- private static final Log LOG = LogFactory.getLog(TestStateMachineProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestStateMachineProcedure.class);
private static final Exception TEST_FAILURE_EXCEPTION = new Exception("test failure") {
@Override
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java
index 017992cfea..202353526a 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java
@@ -24,8 +24,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
@@ -37,13 +35,15 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestYieldProcedures {
- private static final Log LOG = LogFactory.getLog(TestYieldProcedures.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestYieldProcedures.class);
private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
private static final Procedure NULL_PROC = null;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java
index 550116e2e1..e4766f6f15 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java
@@ -20,13 +20,13 @@ package org.apache.hadoop.hbase.procedure2.store;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker.BitSetNode;
import static org.junit.Assert.assertEquals;
@@ -35,7 +35,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureStoreTracker {
- private static final Log LOG = LogFactory.getLog(TestProcedureStoreTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureStoreTracker.class);
@Test
public void testSeqInsertAndDelete() {
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java
index 98ec1146e7..31c9cf3ee2 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -40,6 +38,8 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.Ignore;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -48,7 +48,7 @@ import static org.junit.Assert.fail;
@Category({MasterTests.class, LargeTests.class})
public class TestStressWALProcedureStore {
- private static final Log LOG = LogFactory.getLog(TestWALProcedureStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedureStore.class);
private static final int PROCEDURE_STORE_SLOTS = 8;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
index 98b1b7c9d6..a7bab8f625 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
@@ -28,8 +28,6 @@ import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
@@ -53,6 +51,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -61,7 +61,7 @@ import static org.junit.Assert.fail;
@Category({MasterTests.class, SmallTests.class})
public class TestWALProcedureStore {
- private static final Log LOG = LogFactory.getLog(TestWALProcedureStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedureStore.class);
private static final int PROCEDURE_STORE_SLOTS = 1;
private static final Procedure NULL_PROC = null;
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java
index 019b4567fd..dcb133e9ef 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java
@@ -18,18 +18,18 @@
package org.apache.hadoop.hbase.procedure2.util;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
@Category({MasterTests.class, SmallTests.class})
public class TestDelayedUtil {
- private static final Log LOG = LogFactory.getLog(TestDelayedUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDelayedUtil.class);
@Test
public void testDelayedContainerEquals() {
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index 16766919c5..e71a21bafc 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -169,6 +169,7 @@
<exclude>junit:junit</exclude>
<exclude>log4j:log4j</exclude>
<exclude>commons-logging:commons-logging</exclude>
+ <exclude>org.slf4j:slf4j-api</exclude>
<exclude>org.apache.yetus:audience-annotations</exclude>
<exclude>com.github.stephenc.findbugs:*</exclude>
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index a608dffccd..d07a031dfe 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -165,8 +165,8 @@
<artifactId>protobuf-java</artifactId>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
index e8491f7e3d..65f1cc6721 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.util;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.ByteString;
import com.google.protobuf.HBaseZeroCopyByteString;
@@ -29,7 +29,7 @@ import com.google.protobuf.HBaseZeroCopyByteString;
*/
@InterfaceAudience.Private
public class ByteStringer {
- private static final Log LOG = LogFactory.getLog(ByteStringer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ByteStringer.class);
/**
* Flag set at class loading time.
diff --git a/hbase-replication/pom.xml b/hbase-replication/pom.xml
index 0236601b4f..ab221999ad 100644
--- a/hbase-replication/pom.xml
+++ b/hbase-replication/pom.xml
@@ -127,8 +127,8 @@
<artifactId>commons-lang3</artifactId>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
<groupId>org.apache.zookeeper</groupId>
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
index 214a313631..454d09ce35 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java
@@ -25,13 +25,12 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
import org.apache.hadoop.hbase.zookeeper.ZKNodeTracker;
@@ -40,11 +39,13 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.NodeExistsException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class ReplicationPeerZKImpl extends ReplicationStateZKBase
implements ReplicationPeer, Abortable, Closeable {
- private static final Log LOG = LogFactory.getLog(ReplicationPeerZKImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeerZKImpl.class);
private ReplicationPeerConfig peerConfig;
private final String id;
@@ -187,8 +188,8 @@ public class ReplicationPeerZKImpl extends ReplicationStateZKBase
@Override
public void abort(String why, Throwable e) {
- LOG.fatal("The ReplicationPeer corresponding to peer " + peerConfig
- + " was aborted for the following reason(s):" + why, e);
+ LOG.error(HBaseMarkers.FATAL, "The ReplicationPeer corresponding to peer " +
+ peerConfig + " was aborted for the following reason(s):" + why, e);
}
@Override
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
index ca99f65a29..8f5e8d57e9 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
@@ -28,8 +28,6 @@ import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.CompoundConfiguration;
@@ -47,6 +45,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class provides an implementation of the ReplicationPeers interface using ZooKeeper. The
@@ -82,7 +82,7 @@ public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements Re
private final ReplicationQueuesClient queuesClient;
private Abortable abortable;
- private static final Log LOG = LogFactory.getLog(ReplicationPeersZKImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeersZKImpl.class);
public ReplicationPeersZKImpl(final ZKWatcher zk, final Configuration conf,
final ReplicationQueuesClient queuesClient, Abortable abortable) {
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
index 68b7ebeec9..ecd888f51e 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
@@ -23,10 +23,9 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ServerName;
/**
@@ -36,7 +35,7 @@ import org.apache.hadoop.hbase.ServerName;
*/
@InterfaceAudience.Private
public class ReplicationQueueInfo {
- private static final Log LOG = LogFactory.getLog(ReplicationQueueInfo.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationQueueInfo.class);
private final String peerId;
private final String peerClusterZnode;
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
index b998f15975..e85b42ae0a 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.replication;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
@@ -33,12 +31,14 @@ import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class ReplicationQueuesClientZKImpl extends ReplicationStateZKBase implements
ReplicationQueuesClient {
- Log LOG = LogFactory.getLog(ReplicationQueuesClientZKImpl.class);
+ Logger LOG = LoggerFactory.getLogger(ReplicationQueuesClientZKImpl.class);
public ReplicationQueuesClientZKImpl(ReplicationQueuesClientArguments args) {
this(args.getZk(), args.getConf(), args.getAbortable());
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
index 95fd29430c..7551cb7660 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
@@ -23,8 +23,6 @@ import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Abortable;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class provides an implementation of the
@@ -67,7 +67,7 @@ public class ReplicationQueuesZKImpl extends ReplicationStateZKBase implements R
/** Znode containing all replication queues for this region server. */
private String myQueuesZnode;
- private static final Log LOG = LogFactory.getLog(ReplicationQueuesZKImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationQueuesZKImpl.class);
public ReplicationQueuesZKImpl(ReplicationQueuesArguments args) {
this(args.getZk(), args.getConf(), args.getAbortable());
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
index 300a93b602..9a1d9aaefb 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
@@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class is a ZooKeeper implementation of the ReplicationTracker interface. This class is
@@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Private
public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements ReplicationTracker {
- private static final Log LOG = LogFactory.getLog(ReplicationTrackerZKImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationTrackerZKImpl.class);
// All about stopping
private final Stoppable stopper;
// listeners to be notified
diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
index 546464344d..b6c849c735 100644
--- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
+++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.replication;
import org.apache.commons.lang3.NotImplementedException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
@@ -59,7 +59,7 @@ import java.util.TreeSet;
public class TableBasedReplicationQueuesImpl extends ReplicationTableBase
implements ReplicationQueues {
- private static final Log LOG = LogFactory.getLog(TableBasedReplicationQueuesImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableBasedReplicationQueuesImpl.class);
// Common byte values used in replication offset tracking
private static final byte[] INITIAL_OFFSET_BYTES = Bytes.toBytes(0L);
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index e7dc864bd1..d1036f2d8e 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -269,8 +269,8 @@
        <artifactId>commons-lang3</artifactId>
      </dependency>
      <dependency>
-       <groupId>commons-logging</groupId>
-       <artifactId>commons-logging</artifactId>
+       <groupId>org.slf4j</groupId>
+       <artifactId>slf4j-api</artifactId>
      </dependency>
      <dependency>
        <groupId>javax.xml.bind</groupId>
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index 921b17c97c..9f353aab6c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -27,9 +27,9 @@ import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.rest.model.CellModel;
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.rest.model.RowModel;
@InterfaceAudience.Private
public class MultiRowResource extends ResourceBase implements Constants {
- private static final Log LOG = LogFactory.getLog(MultiRowResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiRowResource.class);
TableResource tableResource;
Integer versions = null;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
index 4faf1d18eb..3ff25f99ef 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
@@ -34,11 +34,11 @@ import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.rest.model.NamespacesInstanceModel;
import org.apache.hadoop.hbase.rest.model.TableListModel;
@@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.rest.model.TableModel;
@InterfaceAudience.Private
public class NamespacesInstanceResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(NamespacesInstanceResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NamespacesInstanceResource.class);
String namespace;
boolean queryTables = false;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
index 4c5390aa6f..fe48bafd47 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
@@ -30,9 +30,9 @@ import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.rest.model.NamespacesModel;
/**
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.rest.model.NamespacesModel;
@InterfaceAudience.Private
public class NamespacesResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(NamespacesResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NamespacesResource.class);
/**
* Constructor
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
index b06704496d..5ea8a316d3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
@@ -24,8 +24,6 @@ import java.util.List;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.StreamingOutput;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
@@ -34,9 +32,11 @@ import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class ProtobufStreamingOutput implements StreamingOutput {
- private static final Log LOG = LogFactory.getLog(ProtobufStreamingOutput.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProtobufStreamingOutput.class);
private String contentType;
private ResultScanner resultScanner;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index 360ab9d5e6..e2fccf0687 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -32,13 +32,12 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.http.InfoServer;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.rest.filter.AuthFilter;
import org.apache.hadoop.hbase.rest.filter.GzipFilter;
import org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter;
@@ -68,6 +67,8 @@ import org.eclipse.jetty.servlet.FilterHolder;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import javax.servlet.DispatcherType;
@@ -82,7 +83,7 @@ import javax.servlet.DispatcherType;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class RESTServer implements Constants {
- static Log LOG = LogFactory.getLog("RESTServer");
+ static Logger LOG = LoggerFactory.getLogger("RESTServer");
static String REST_CSRF_ENABLED_KEY = "hbase.rest.csrf.enabled";
static boolean REST_CSRF_ENABLED_DEFAULT = false;
@@ -356,7 +357,7 @@ public class RESTServer implements Constants {
server.start();
server.join();
} catch (Exception e) {
- LOG.fatal("Failed to start server", e);
+ LOG.error(HBaseMarkers.FATAL, "Failed to start server", e);
System.exit(1);
}
LOG.info("***** STOPPING service '" + RESTServer.class.getSimpleName() + "' *****");
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 1e5d4a98c5..b2fa16dde2 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.ParseFilter;
@@ -38,7 +38,7 @@ import org.apache.hadoop.security.authorize.ProxyUsers;
*/
@InterfaceAudience.Private
public class RESTServlet implements Constants {
- private static final Log LOG = LogFactory.getLog(RESTServlet.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RESTServlet.class);
private static RESTServlet INSTANCE;
private final Configuration conf;
private final MetricsREST metrics;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
index 183262d2d2..1e0f7beb9a 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
@@ -29,8 +29,6 @@ import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -41,10 +39,12 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.rest.model.TableInfoModel;
import org.apache.hadoop.hbase.rest.model.TableRegionModel;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class RegionsResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(RegionsResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionsResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
index d2ddb0d76c..98217451c5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
@@ -31,10 +31,9 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.rest.model.TableListModel;
import org.apache.hadoop.hbase.rest.model.TableModel;
@@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.rest.model.TableModel;
@Path("/")
@InterfaceAudience.Private
public class RootResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(RootResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RootResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index 8c1cb5b2cf..b440fdf3a4 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -37,8 +37,6 @@ import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -56,10 +54,12 @@ import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class RowResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(RowResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RowResource.class);
private static final String CHECK_PUT = "put";
private static final String CHECK_DELETE = "delete";
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
index 1edd73a063..9571c82d1b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
@@ -23,10 +23,10 @@ import java.io.IOException;
import java.util.Iterator;
import java.util.NoSuchElementException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.client.Get;
@@ -37,7 +37,7 @@ import org.apache.hadoop.util.StringUtils;
@InterfaceAudience.Private
public class RowResultGenerator extends ResultGenerator {
- private static final Log LOG = LogFactory.getLog(RowResultGenerator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RowResultGenerator.class);
private Iterator<Cell> valuesI;
private Cell cache;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
index 8f5611589b..b3e3985423 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
@@ -31,12 +31,12 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
@@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public class ScannerInstanceResource extends ResourceBase {
- private static final Log LOG =
- LogFactory.getLog(ScannerInstanceResource.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ScannerInstanceResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
index 60b348ee7f..d2b173fa0c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
@@ -35,10 +35,9 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.rest.model.ScannerModel;
@@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.rest.model.ScannerModel;
@InterfaceAudience.Private
public class ScannerResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(ScannerResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ScannerResource.class);
static final Map<String,ScannerInstanceResource> scanners =
  Collections.synchronizedMap(new HashMap<String,ScannerInstanceResource>());
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
index ece4f1249b..b622fede6b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
@@ -22,14 +22,14 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.Iterator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
@@ -42,8 +42,8 @@ import org.apache.hadoop.util.StringUtils;
@InterfaceAudience.Private
public class ScannerResultGenerator extends ResultGenerator {
- private static final Log LOG =
- LogFactory.getLog(ScannerResultGenerator.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ScannerResultGenerator.class);
public static Filter buildFilterFromModel(final ScannerModel model)
throws Exception {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
index 8ce59eb483..e617cd4426 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
@@ -35,8 +35,6 @@ import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import javax.xml.namespace.QName;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableExistsException;
@@ -44,6 +42,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.rest.model.ColumnSchemaModel;
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
@InterfaceAudience.Private
public class SchemaResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(SchemaResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SchemaResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
index 460f86a68a..90ebccb47e 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
@@ -30,10 +30,9 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.RegionLoad;
@@ -43,8 +42,8 @@ import org.apache.hadoop.hbase.rest.model.StorageClusterStatusModel;
@InterfaceAudience.Private
public class StorageClusterStatusResource extends ResourceBase {
- private static final Log LOG =
- LogFactory.getLog(StorageClusterStatusResource.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(StorageClusterStatusResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
index 42f531cc7c..3d70410a40 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
@@ -30,17 +30,16 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ClusterStatus.Option;
import org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel;
@InterfaceAudience.Private
public class StorageClusterVersionResource extends ResourceBase {
- private static final Log LOG =
- LogFactory.getLog(StorageClusterVersionResource.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(StorageClusterVersionResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index b32db7f19c..b52b91ba10 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -21,21 +21,16 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.List;
-
import javax.ws.rs.DefaultValue;
import javax.ws.rs.Encoded;
-import javax.ws.rs.HeaderParam;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.UriInfo;
-
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter;
@@ -48,7 +43,7 @@ import org.apache.hadoop.hbase.util.Bytes;
public class TableResource extends ResourceBase {
String table;
- private static final Log LOG = LogFactory.getLog(TableResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableResource.class);
/**
* Constructor
@@ -206,7 +201,7 @@ public class TableResource extends ResourceBase {
} catch (IOException exp) {
servlet.getMetrics().incrementFailedScanRequests(1);
processException(exp);
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
return null;
}
}
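Note: commons-logging accepted LOG.warn(exp) with a bare exception, but slf4j's warn(...) takes a String message, so the hunk above passes the exception text as the message and the exception itself as the throwable, which also records the stack trace. A minimal sketch of the idiom (the class below is hypothetical, not from the patch):

    import java.io.IOException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ScanErrorLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(ScanErrorLoggingExample.class);

      void handle(IOException exp) {
        // message text plus the throwable, so the stack trace ends up in the log
        LOG.warn(exp.toString(), exp);
      }
    }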
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
index f8b959331d..05bb0d6a05 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
@@ -36,22 +36,22 @@ import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
@InterfaceAudience.Private
public class TableScanResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(TableScanResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableScanResource.class);
TableResource tableResource;
ResultScanner results;
@@ -126,7 +126,7 @@ public class TableScanResource extends ResourceBase {
} catch (Exception exp) {
servlet.getMetrics().incrementFailedScanRequests(1);
processException(exp);
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
return null;
}
}
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
index 3847840893..c212334153 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
@@ -31,10 +31,9 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.rest.model.VersionModel;
/**
@@ -47,7 +46,7 @@ import org.apache.hadoop.hbase.rest.model.VersionModel;
@InterfaceAudience.Private
public class VersionResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(VersionResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VersionResource.class);
static CacheControl cacheControl;
static {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
index c756a79534..d8cf5f4a11 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
@@ -29,9 +29,9 @@ import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
@@ -55,7 +55,7 @@ import org.apache.http.util.EntityUtils;
public class Client {
public static final Header[] EMPTY_HEADER_ARRAY = new Header[0];
- private static final Log LOG = LogFactory.getLog(Client.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Client.class);
private HttpClient httpClient;
private Cluster cluster;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
index 71001b0fc0..bb48243ad7 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
@@ -23,8 +23,7 @@ import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -35,6 +34,8 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
@@ -84,7 +85,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Public
public class RemoteHTable implements Table {
- private static final Log LOG = LogFactory.getLog(RemoteHTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RemoteHTable.class);
final Client client;
final Configuration conf;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
index 8f68f664ba..adffc126b7 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
@@ -22,9 +22,9 @@ package org.apache.hadoop.hbase.rest.client;
import java.io.IOException;
import java.io.InputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
@@ -33,7 +33,7 @@ import org.apache.http.HttpResponse;
*/
@InterfaceAudience.Public
public class Response {
- private static final Log LOG = LogFactory.getLog(Response.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Response.class);
private int code;
private Header[] headers;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
index f051bc8212..5dfa58caec 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
@@ -28,17 +28,17 @@ import java.util.Properties;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.DNS;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class AuthFilter extends AuthenticationFilter {
- private static final Log LOG = LogFactory.getLog(AuthFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AuthFilter.class);
private static final String REST_PREFIX = "hbase.rest.authentication.";
private static final int REST_PREFIX_LEN = REST_PREFIX.length();
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
index 76dc70e737..31a437a178 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
@@ -34,10 +34,10 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
/**
@@ -50,8 +50,8 @@ import org.apache.hadoop.conf.Configuration;
@InterfaceAudience.Public
public class RestCsrfPreventionFilter implements Filter {
- private static final Log LOG =
- LogFactory.getLog(RestCsrfPreventionFilter.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(RestCsrfPreventionFilter.class);
public static final String HEADER_USER_AGENT = "User-Agent";
public static final String BROWSER_USER_AGENT_PARAM =
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index 4483bdbe50..882bd983d0 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -32,9 +32,9 @@ import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.Provider;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
@@ -47,8 +47,8 @@ import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
@InterfaceAudience.Private
public class ProtobufMessageBodyConsumer
implements MessageBodyReader<ProtobufMessageHandler> {
- private static final Log LOG =
- LogFactory.getLog(ProtobufMessageBodyConsumer.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ProtobufMessageBodyConsumer.class);
@Override
public boolean isReadable(Class<?> type, Type genericType,
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java
index 7ad162431a..5af8ee2bfa 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java
@@ -28,11 +28,11 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class DummyFilter implements Filter {
- private static final Log LOG = LogFactory.getLog(DummyFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DummyFilter.class);
@Override
public void destroy() {
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
index 4cce21b369..273010a334 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.rest;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.http.HttpServerUtil;
@@ -36,6 +34,8 @@ import org.eclipse.jetty.servlet.ServletHolder;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import javax.servlet.DispatcherType;
import java.util.Arrays;
@@ -43,7 +43,7 @@ import java.util.EnumSet;
public class HBaseRESTTestingUtility {
- private static final Log LOG = LogFactory.getLog(HBaseRESTTestingUtility.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseRESTTestingUtility.class);
private int testServletPort;
private Server server;
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
index 476594e080..21d25e289e 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
@@ -34,8 +34,6 @@ import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -62,7 +60,6 @@ import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
@@ -94,6 +91,8 @@ import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Script used evaluating Stargate performance and scalability. Runs a SG
@@ -112,7 +111,8 @@ import org.apache.hadoop.util.ToolRunner;
* runs an individual client. Each client does about 1GB of data.
*/
public class PerformanceEvaluation extends Configured implements Tool {
- protected static final Log LOG = LogFactory.getLog(PerformanceEvaluation.class.getName());
+ protected static final Logger LOG =
+ LoggerFactory.getLogger(PerformanceEvaluation.class);
private static final int DEFAULT_ROW_PREFIX_LENGTH = 16;
private static final int ROW_LENGTH = 1000;
@@ -418,7 +418,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
}
key = NullWritable.get();
- value = (PeInputSplit)split;
+ value = split;
readOver = true;
return true;
@@ -490,10 +490,12 @@ public class PerformanceEvaluation extends Configured implements Tool {
return clazz;
}
+ @Override
protected void map(NullWritable key, PeInputSplit value, final Context context)
throws IOException, InterruptedException {
Status status = new Status() {
+ @Override
public void setStatus(String msg) {
context.setStatus(msg);
}
@@ -635,6 +637,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
long elapsedTime = pe.runOneClient(cmd, index * perClientRows,
perClientRows, R,
flushCommits, writeToWAL, useTags, noOfTags, connection, new Status() {
+ @Override
public void setStatus(final String msg) throws IOException {
LOG.info("client-" + getName() + " " + msg);
}
@@ -956,6 +959,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
super(conf, options, status);
}
+ @Override
void testSetup() throws IOException {
this.table = connection.getTable(tableName);
}
@@ -975,10 +979,12 @@ public class PerformanceEvaluation extends Configured implements Tool {
this.flushCommits = options.isFlushCommits();
}
+ @Override
void testSetup() throws IOException {
this.mutator = connection.getBufferedMutator(tableName);
}
+ @Override
void testTakedown() throws IOException {
if (flushCommits) {
this.mutator.flush();
@@ -1214,7 +1220,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
}
static class FilteredScanTest extends TableTest {
- protected static final Log LOG = LogFactory.getLog(FilteredScanTest.class.getName());
+ protected static final Logger LOG = LoggerFactory.getLogger(FilteredScanTest.class.getName());
FilteredScanTest(Configuration conf, TestOptions options, Status status) {
super(conf, options, status);
@@ -1327,6 +1333,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
private void runNIsOne(final Class<? extends Test> cmd) {
Status status = new Status() {
+ @Override
public void setStatus(String msg) throws IOException {
LOG.info(msg);
}
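Note: most declarations in this patch use LoggerFactory.getLogger(SomeClass.class), while FilteredScanTest above keeps the String overload via FilteredScanTest.class.getName(); both forms resolve to the same logger name. A small sketch (class name hypothetical):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggerNamingExample {
      // both fields refer to the same logger, named after the fully qualified class name
      private static final Logger BY_CLASS = LoggerFactory.getLogger(LoggerNamingExample.class);
      private static final Logger BY_NAME = LoggerFactory.getLogger(LoggerNamingExample.class.getName());
    }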
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
index 2b2e5e3700..179befe007 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
@@ -37,14 +37,11 @@ import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
-import org.apache.http.Header;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
@@ -62,14 +59,17 @@ import org.apache.hadoop.hbase.rest.model.ScannerModel;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RestTests;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.http.Header;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
public class TestScannerResource {
- private static final Log LOG = LogFactory.getLog(TestScannerResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestScannerResource.class);
private static final TableName TABLE = TableName.valueOf("TestScannerResource");
private static final TableName TABLE_TO_BE_DISABLED = TableName.valueOf("ScannerResourceDisable");
private static final String NONEXISTENT_TABLE = "ThisTableDoesNotExist";
@@ -79,7 +79,7 @@ public class TestScannerResource {
private static final String COLUMN_2 = CFB + ":2";
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final HBaseRESTTestingUtility REST_TEST_UTIL =
+ private static final HBaseRESTTestingUtility REST_TEST_UTIL =
new HBaseRESTTestingUtility();
private static Client client;
private static JAXBContext context;
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
index c8bbc24149..614b1a10ef 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
@@ -34,8 +34,6 @@ import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -79,11 +77,13 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
public class TestScannersWithFilters {
- private static final Log LOG = LogFactory.getLog(TestScannersWithFilters.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestScannersWithFilters.class);
private static final TableName TABLE = TableName.valueOf("TestScannersWithFilters");
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
index 4866d53c2f..04b23fa59f 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
@@ -26,8 +26,6 @@ import java.util.Collection;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.http.Header;
import org.apache.http.message.BasicHeader;
@@ -55,11 +53,13 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
@RunWith(Parameterized.class)
public class TestSchemaResource {
- private static final Log LOG = LogFactory.getLog(TestSchemaResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSchemaResource.class);
private static String TABLE1 = "TestSchemaResource1";
private static String TABLE2 = "TestSchemaResource2";
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java
index ca3b82f4ef..e86a4f8c0a 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java
@@ -24,8 +24,6 @@ import java.io.IOException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -44,10 +42,12 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
public class TestStatusResource {
- private static final Log LOG = LogFactory.getLog(TestStatusResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestStatusResource.class);
private static final byte[] META_REGION_NAME = Bytes.toBytes(TableName.META_TABLE_NAME + ",,1");
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
index 26891774b7..55913499ef 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
@@ -32,8 +32,6 @@ import java.util.List;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -64,10 +62,12 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
public class TestTableResource {
- private static final Log LOG = LogFactory.getLog(TestTableResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableResource.class);
private static TableName TABLE = TableName.valueOf("TestTableResource");
private static String COLUMN_FAMILY = "test";
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
index 8380a0a4fb..a10fef013c 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
@@ -43,8 +43,6 @@ import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.stream.XMLStreamException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -67,6 +65,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
@@ -78,7 +78,7 @@ import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
@Category({RestTests.class, MediumTests.class})
public class TestTableScan {
- private static final Log LOG = LogFactory.getLog(TestTableScan.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableScan.class);
private static final TableName TABLE = TableName.valueOf("TestScanResource");
private static final String CFA = "a";
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
index 1f927f597e..50cb0854b3 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
@@ -25,8 +25,6 @@ import javax.ws.rs.core.MediaType;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
@@ -48,10 +46,12 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, MediumTests.class})
public class TestVersionResource {
- private static final Log LOG = LogFactory.getLog(TestVersionResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestVersionResource.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final HBaseRESTTestingUtility REST_TEST_UTIL =
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java
index 586e33c183..cf5519e7a7 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java
@@ -25,8 +25,6 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import javax.xml.bind.UnmarshalException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel;
@@ -34,13 +32,15 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.util.StringUtils;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test class for {@link RemoteAdmin} to verify XML is parsed in a certain manner.
*/
@Category(SmallTests.class)
public class TestXmlParsing {
- private static final Log LOG = LogFactory.getLog(TestXmlParsing.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestXmlParsing.class);
@Test
public void testParsingClusterVersion() throws Exception {
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
index c41128d737..81de4361bb 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
@@ -23,16 +23,16 @@ import java.util.Iterator;
import javax.xml.bind.JAXBContext;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.RestTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RestTests.class, SmallTests.class})
public class TestTableSchemaModel extends TestModelBase<TableSchemaModel> {
- private static final Log LOG = LogFactory.getLog(TestTableSchemaModel.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableSchemaModel.class);
public static final String TABLE_NAME = "testTable";
private static final boolean IS_META = false;
diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml
index c53dc953e5..dba581046e 100644
--- a/hbase-rsgroup/pom.xml
+++ b/hbase-rsgroup/pom.xml
@@ -129,8 +129,8 @@
      <artifactId>commons-lang3</artifactId>
    </dependency>
    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase.thirdparty</groupId>
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
index e332f5ccb0..75588408e4 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
@@ -29,8 +29,7 @@ import java.util.stream.Collectors;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -77,12 +76,14 @@ import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveServe
import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveServersResponse;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// TODO: Encapsulate MasterObserver functions into separate subclass.
@CoreCoprocessor
@InterfaceAudience.Private
public class RSGroupAdminEndpoint implements MasterCoprocessor, MasterObserver {
- private static final Log LOG = LogFactory.getLog(RSGroupAdminEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RSGroupAdminEndpoint.class);
private MasterServices master = null;
// Only instance of RSGroupInfoManager. RSGroup aware load balancers ask for this instance on
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
index 45421e325b..b4d35e306c 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
@@ -29,8 +29,6 @@ import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -48,13 +46,15 @@ import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Service to support Region Server Grouping (HBase-6721).
*/
@InterfaceAudience.Private
public class RSGroupAdminServer implements RSGroupAdmin {
- private static final Log LOG = LogFactory.getLog(RSGroupAdminServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RSGroupAdminServer.class);
private MasterServices master;
private final RSGroupInfoManager rsGroupInfoManager;
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
index 0c24ce51e9..a228a768e5 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
@@ -29,8 +29,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseIOException;
@@ -52,6 +51,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* GroupBasedLoadBalancer, used when Region Server Grouping is configured (HBase-6721)
@@ -69,7 +70,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class RSGroupBasedLoadBalancer implements RSGroupableBalancer {
- private static final Log LOG = LogFactory.getLog(RSGroupBasedLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RSGroupBasedLoadBalancer.class);
private Configuration config;
private ClusterStatus clusterStatus;
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
index 4d8ff92609..67dfde729f 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
@@ -34,8 +34,6 @@ import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -81,7 +79,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@@ -116,7 +115,7 @@ import com.google.protobuf.ServiceException;
*/
@InterfaceAudience.Private
class RSGroupInfoManagerImpl implements RSGroupInfoManager {
- private static final Log LOG = LogFactory.getLog(RSGroupInfoManagerImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RSGroupInfoManagerImpl.class);
/** Table descriptor for hbase:rsgroup catalog table */
private final static HTableDescriptor RSGROUP_TABLE_DESC;
@@ -624,7 +623,7 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager {
* done asynchronously in this thread.
*/
private class ServerEventsListenerThread extends Thread implements ServerListener {
- private final Log LOG = LogFactory.getLog(ServerEventsListenerThread.class);
+ private final Logger LOG = LoggerFactory.getLogger(ServerEventsListenerThread.class);
private boolean changed = false;
ServerEventsListenerThread() {
@@ -738,7 +737,7 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager {
}
private class RSGroupStartupWorker extends Thread {
- private final Log LOG = LogFactory.getLog(RSGroupStartupWorker.class);
+ private final Logger LOG = LoggerFactory.getLogger(RSGroupStartupWorker.class);
private volatile boolean online = false;
RSGroupStartupWorker() {
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java
index 5ce0c09f67..797022c169 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java
@@ -36,8 +36,6 @@ import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -63,7 +61,8 @@ import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -71,7 +70,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category(SmallTests.class)
public class TestRSGroupBasedLoadBalancer {
- private static final Log LOG = LogFactory.getLog(TestRSGroupBasedLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRSGroupBasedLoadBalancer.class);
private static RSGroupBasedLoadBalancer loadBalancer;
private static SecureRandom rand;
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
index f2ae112c60..378c1ab0a9 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.Iterator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -53,12 +51,13 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@Category({MediumTests.class})
public class TestRSGroups extends TestRSGroupsBase {
- protected static final Log LOG = LogFactory.getLog(TestRSGroups.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(TestRSGroups.class);
private static HMaster master;
private static boolean INIT = false;
private static RSGroupAdminEndpoint rsGroupAdminEndpoint;
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
index bbcf1206af..cd8c3869e5 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java
@@ -34,8 +34,6 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.ClusterStatus.Option;
import org.apache.hadoop.hbase.HBaseCluster;
@@ -59,7 +57,8 @@ import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest;
public abstract class TestRSGroupsBase {
- protected static final Log LOG = LogFactory.getLog(TestRSGroupsBase.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(TestRSGroupsBase.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
index 6f7b47d554..dd190fe1f9 100644
--- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
+++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.rsgroup;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
import org.apache.hadoop.hbase.HBaseCluster;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -41,11 +41,10 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
// This tests that GroupBasedBalancer will use data in zk to do balancing during master startup.
// This does not test retain assignment.
@@ -56,7 +55,7 @@ import static org.junit.Assert.assertFalse;
// assignment with a timeout.
@Category(MediumTests.class)
public class TestRSGroupsOfflineMode {
- private static final Log LOG = LogFactory.getLog(TestRSGroupsOfflineMode.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRSGroupsOfflineMode.class);
private static HMaster master;
private static Admin hbaseAdmin;
private static HBaseTestingUtility TEST_UTIL;
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 52e8a522b7..ac11283404 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -405,7 +405,6 @@
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-resource-bundle</artifactId>
-      <version>${project.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
@@ -492,13 +491,17 @@
      <artifactId>commons-lang3</artifactId>
    </dependency>
    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-math3</artifactId>
    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+    </dependency>
    <dependency>
      <groupId>log4j</groupId>
      <artifactId>log4j</artifactId>
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
index 5f3531d2e0..8ab139f63c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
@@ -17,17 +17,17 @@
*/
package org.apache.hadoop.hbase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HealthChecker.HealthCheckerExitStatus;
import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The Class HealthCheckChore for running health checker regularly.
*/
public class HealthCheckChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(HealthCheckChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HealthCheckChore.class);
private HealthChecker healthChecker;
private Configuration config;
private int threshold;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java
index 45e0f3aff9..a43a51d865 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A utility for executing an external script that checks the health of
@@ -33,7 +33,7 @@ import org.apache.hadoop.util.Shell.ShellCommandExecutor;
*/
class HealthChecker {
- private static final Log LOG = LogFactory.getLog(HealthChecker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HealthChecker.class);
private ShellCommandExecutor shexec = null;
private String exceptionStackTrace;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
index 1472057027..6fdc77e34d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
@@ -19,11 +19,12 @@
package org.apache.hadoop.hbase;
import com.google.protobuf.Service;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.management.ManagementFactory;
@@ -49,7 +50,7 @@ import javax.management.remote.rmi.RMIConnectorServer;
* 3)support subset of SSL (with default configuration)
*/
public class JMXListener implements MasterCoprocessor, RegionServerCoprocessor {
- private static final Log LOG = LogFactory.getLog(JMXListener.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JMXListener.class);
public static final String RMI_REGISTRY_PORT_CONF_KEY = ".rmi.registry.port";
public static final String RMI_CONNECTOR_PORT_CONF_KEY = ".rmi.connector.port";
public static final int defMasterRMIRegistryPort = 10101;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
index e43d33bb0e..06199f72d6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
@@ -24,9 +24,9 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
@@ -58,7 +58,7 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil;
*/
@InterfaceAudience.Public
public class LocalHBaseCluster {
- private static final Log LOG = LogFactory.getLog(LocalHBaseCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LocalHBaseCluster.class);
private final List<JVMClusterUtil.MasterThread> masterThreads = new CopyOnWriteArrayList<>();
private final List<JVMClusterUtil.RegionServerThread> regionThreads = new CopyOnWriteArrayList<>();
private final static int DEFAULT_NO = 1;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java
index 9e228ad40c..e088751a1a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java
@@ -18,25 +18,25 @@
package org.apache.hadoop.hbase;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.zookeeper.ZKUtil;
-import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-import org.apache.hadoop.hbase.zookeeper.ZKListener;
-import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-import org.apache.zookeeper.KeeperException;
-
import java.io.IOException;
import java.util.List;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.concurrent.ConcurrentSkipListMap;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.zookeeper.ZKListener;
+import org.apache.hadoop.hbase.zookeeper.ZKUtil;
+import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
+import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
/**
* Class servers two purposes:
@@ -49,7 +49,7 @@ import java.util.concurrent.ConcurrentSkipListMap;
*/
@InterfaceAudience.Private
public class ZKNamespaceManager extends ZKListener {
- private static final Log LOG = LogFactory.getLog(ZKNamespaceManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKNamespaceManager.class);
private final String nsZNode;
private final NavigableMap<String, NamespaceDescriptor> cache;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java
index cda5affac4..3e911a8dd6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java
@@ -26,8 +26,6 @@ import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Contains a set of methods for the collaboration between the start/stop scripts and the
@@ -49,7 +49,7 @@ import org.apache.zookeeper.KeeperException;
* check its content to make sure that the backup server is not now in charge.
*/
public class ZNodeClearer {
- private static final Log LOG = LogFactory.getLog(ZNodeClearer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZNodeClearer.class);
private ZNodeClearer() {}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
index 4da1235fd4..354a63caa6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
@@ -25,8 +25,6 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.io.MultipleIOException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Collections2;
@@ -54,7 +53,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
@InterfaceAudience.Private
public class HFileArchiver {
- private static final Log LOG = LogFactory.getLog(HFileArchiver.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class);
private static final String SEPARATOR = ".";
/** Number of retries in case of fs operation failure */
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
index 389dea7fc0..c51d4937a1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.backup.example;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Client-side manager for which table's hfiles should be preserved for long-term archive.
@@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException;
class HFileArchiveManager {
private final String archiveZnode;
- private static final Log LOG = LogFactory.getLog(HFileArchiveManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileArchiveManager.class);
private final ZKWatcher zooKeeper;
private volatile boolean stopped = false;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java
index 3a1653417d..93c9690d79 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java
@@ -21,8 +21,8 @@ import java.util.List;
import java.util.Set;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Monitor the actual tables for which HFiles are archived for long-term retention (always kept
@@ -31,7 +31,7 @@ import org.apache.commons.logging.LogFactory;
* It is internally synchronized to ensure consistent view of the table state.
*/
public class HFileArchiveTableMonitor {
- private static final Log LOG = LogFactory.getLog(HFileArchiveTableMonitor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileArchiveTableMonitor.class);
private final Set<String> archivedTables = new TreeSet<>();
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
index ff7a51d0a8..484ff5ea17 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.backup.example;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.master.cleaner.BaseHFileCleanerDelegate;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* {@link BaseHFileCleanerDelegate} that only cleans HFiles that don't belong to a table that is
@@ -44,7 +44,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class LongTermArchivingHFileCleaner extends BaseHFileCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(LongTermArchivingHFileCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LongTermArchivingHFileCleaner.class);
TableHFileArchiveTracker archiveTracker;
private FileSystem fs;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java
index 73b50a652d..1b3b775b2a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.backup.example;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
@@ -29,6 +27,8 @@ import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Track HFile archiving state changes in ZooKeeper. Keeps track of the tables whose HFiles should
@@ -39,7 +39,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class TableHFileArchiveTracker extends ZKListener {
- private static final Log LOG = LogFactory.getLog(TableHFileArchiveTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableHFileArchiveTracker.class);
public static final String HFILE_ARCHIVE_ZNODE_PARENT = "hfilearchive";
private HFileArchiveTableMonitor monitor;
private String archiveHFileZNode;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
index 529a2f93d3..7a1a57814f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A client scanner for a region opened for read-only on the client side. Assumes region data
@@ -42,7 +42,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class ClientSideRegionScanner extends AbstractClientScanner {
- private static final Log LOG = LogFactory.getLog(ClientSideRegionScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClientSideRegionScanner.class);
private HRegion region;
RegionScanner scanner;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java
index 36b2bb282d..93b1a4024c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java
@@ -24,8 +24,6 @@ import java.util.Collections;
import java.util.List;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A Scanner which performs a scan over snapshot files. Using this class requires copying the
@@ -65,7 +65,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class TableSnapshotScanner extends AbstractClientScanner {
- private static final Log LOG = LogFactory.getLog(TableSnapshotScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableSnapshotScanner.class);
private Configuration conf;
private String snapshotName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java
index 94a573c638..61a0238bca 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java
@@ -23,11 +23,11 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;
@@ -81,7 +81,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Public
public class EntityLock {
- private static final Log LOG = LogFactory.getLog(EntityLock.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EntityLock.class);
public static final String HEARTBEAT_TIME_BUFFER =
"hbase.client.locks.heartbeat.time.buffer.ms";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java
index 1fa70f48ce..2bbb90bbf6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java
@@ -17,11 +17,11 @@
*/
package org.apache.hadoop.hbase.conf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.Set;
@@ -73,7 +73,7 @@ import java.util.WeakHashMap;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ConfigurationManager {
- private static final Log LOG = LogFactory.getLog(ConfigurationManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ConfigurationManager.class);
// The set of Configuration Observers. These classes would like to get
// notified when the configuration is reloaded from disk. This is a set
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
index 582fabf4eb..6aa5d977b6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
@@ -22,9 +22,9 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.client.Put;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.wal.WALEdit;
@InterfaceAudience.Private
public class ConstraintProcessor implements RegionCoprocessor, RegionObserver {
- private static final Log LOG = LogFactory.getLog(ConstraintProcessor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ConstraintProcessor.class);
private final ClassLoader classloader;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
index e675cc9c4a..426e516153 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
@@ -29,9 +29,9 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
@@ -53,7 +53,7 @@ public final class Constraints {
private Constraints() {
}
- private static final Log LOG = LogFactory.getLog(Constraints.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Constraints.class);
private static final String CONSTRAINT_HTD_KEY_PREFIX = "constraint $";
private static final Pattern CONSTRAINT_HTD_ATTR_KEY_PATTERN = Pattern
.compile(CONSTRAINT_HTD_KEY_PREFIX, Pattern.LITERAL);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java
index 8a07b4b7c1..0cf23c9120 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java
@@ -29,8 +29,6 @@ import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
@@ -38,6 +36,7 @@ import org.apache.hadoop.hbase.SplitLogCounters;
import org.apache.hadoop.hbase.SplitLogTask;
import org.apache.hadoop.hbase.coordination.ZKSplitLogManagerCoordination.TaskFinisher.Status;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.master.SplitLogManager.ResubmitDirective;
import org.apache.hadoop.hbase.master.SplitLogManager.Task;
import org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus;
@@ -57,7 +56,8 @@ import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.NoNodeException;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.data.Stat;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -72,7 +72,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
public static final int DEFAULT_ZK_RETRIES = 3;
public static final int DEFAULT_MAX_RESUBMIT = 3;
- private static final Log LOG = LogFactory.getLog(SplitLogManagerCoordination.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SplitLogManagerCoordination.class);
private final TaskFinisher taskFinisher;
private final Configuration conf;
@@ -301,7 +301,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
}
private void createRescanFailure() {
- LOG.fatal("logic failure, rescan failure must not happen");
+ LOG.error(HBaseMarkers.FATAL, "logic failure, rescan failure must not happen");
}
/**
@@ -353,7 +353,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
return;
}
SplitLogCounters.tot_mgr_null_data.increment();
- LOG.fatal("logic error - got null data " + path);
+ LOG.error(HBaseMarkers.FATAL, "logic error - got null data " + path);
setDone(path, FAILURE);
return;
}
@@ -382,8 +382,8 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
LOG.info("task " + path + " entered state: " + slt.toString());
resubmitOrFail(path, CHECK);
} else {
- LOG.fatal("logic error - unexpected zk state for path = " + path + " data = "
- + slt.toString());
+ LOG.error(HBaseMarkers.FATAL, "logic error - unexpected zk state for path = "
+ + path + " data = " + slt.toString());
setDone(path, FAILURE);
}
}
@@ -573,7 +573,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
* Asynchronous handler for zk create node results. Retries on failures.
*/
public class CreateAsyncCallback implements AsyncCallback.StringCallback {
- private final Log LOG = LogFactory.getLog(CreateAsyncCallback.class);
+ private final Logger LOG = LoggerFactory.getLogger(CreateAsyncCallback.class);
@Override
public void processResult(int rc, String path, Object ctx, String name) {
@@ -614,7 +614,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
* Asynchronous handler for zk get-data-set-watch on node results. Retries on failures.
*/
public class GetDataAsyncCallback implements AsyncCallback.DataCallback {
- private final Log LOG = LogFactory.getLog(GetDataAsyncCallback.class);
+ private final Logger LOG = LoggerFactory.getLogger(GetDataAsyncCallback.class);
@Override
public void processResult(int rc, String path, Object ctx, byte[] data, Stat stat) {
@@ -662,7 +662,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
* Asynchronous handler for zk delete node results. Retries on failures.
*/
public class DeleteAsyncCallback implements AsyncCallback.VoidCallback {
- private final Log LOG = LogFactory.getLog(DeleteAsyncCallback.class);
+ private final Logger LOG = LoggerFactory.getLogger(DeleteAsyncCallback.class);
@Override
public void processResult(int rc, String path, Object ctx) {
@@ -704,7 +704,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements
* {@link org.apache.hadoop.hbase.regionserver.SplitLogWorker}s to rescan for new tasks.
*/
public class CreateRescanAsyncCallback implements AsyncCallback.StringCallback {
- private final Log LOG = LogFactory.getLog(CreateRescanAsyncCallback.class);
+ private final Logger LOG = LoggerFactory.getLogger(CreateRescanAsyncCallback.class);
@Override
public void processResult(int rc, String path, Object ctx, String name) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java
index 0540a8f858..bcba101b31 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java
@@ -26,8 +26,6 @@ import java.util.concurrent.atomic.LongAdder;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.mutable.MutableInt;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -36,6 +34,7 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SplitLogCounters;
import org.apache.hadoop.hbase.SplitLogTask;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.SplitLogWorker;
import org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor;
@@ -54,6 +53,8 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.AsyncCallback;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* ZooKeeper based implementation of {@link SplitLogWorkerCoordination}
@@ -64,7 +65,7 @@ import org.apache.zookeeper.data.Stat;
public class ZkSplitLogWorkerCoordination extends ZKListener implements
SplitLogWorkerCoordination {
- private static final Log LOG = LogFactory.getLog(ZkSplitLogWorkerCoordination.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZkSplitLogWorkerCoordination.class);
private static final int checkInterval = 5000; // 5 seconds
private static final int FAILED_TO_OWN_TASK = -1;
@@ -539,7 +540,7 @@ public class ZkSplitLogWorkerCoordination extends ZKListener implements
* Asynchronous handler for zk get-data-set-watch on node results.
*/
class GetDataAsyncCallback implements AsyncCallback.DataCallback {
- private final Log LOG = LogFactory.getLog(GetDataAsyncCallback.class);
+ private final Logger LOG = LoggerFactory.getLogger(GetDataAsyncCallback.class);
@Override
public void processResult(int rc, String path, Object ctx, byte[] data, Stat stat) {
@@ -580,7 +581,7 @@ public class ZkSplitLogWorkerCoordination extends ZKListener implements
LOG.warn("transisition task " + task + " to " + slt + " failed because of version mismatch",
bve);
} catch (KeeperException.NoNodeException e) {
- LOG.fatal(
+ LOG.error(HBaseMarkers.FATAL,
"logic error - end task " + task + " " + slt + " failed because task doesn't exist", e);
} catch (KeeperException e) {
LOG.warn("failed to end task, " + task + " " + slt, e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
index 9f5ca231f5..2818dcd675 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
@@ -19,13 +19,13 @@
package org.apache.hadoop.hbase.coprocessor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
@@ -34,7 +34,7 @@ import java.io.IOException;
*/
@InterfaceAudience.Private
public class BaseEnvironment implements CoprocessorEnvironment {
- private static final Log LOG = LogFactory.getLog(BaseEnvironment.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BaseEnvironment.class);
/** The coprocessor */
public C impl;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
index f2d9b2a877..9489d69dac 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
@@ -32,9 +32,9 @@ import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Abortable;
@@ -75,7 +75,7 @@ public abstract class CoprocessorHost<C extends Coprocessor, E extends Coprocess
- private static final Log LOG = LogFactory.getLog(CoprocessorHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CoprocessorHost.class);
protected SortedList<E> coprocEnvironments =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java
index 75117fd6e8..87f7c00064 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.errorhandling;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The dispatcher acts as the state holding entity for foreign error handling. The first
@@ -40,7 +40,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class ForeignExceptionDispatcher implements ForeignExceptionListener, ForeignExceptionSnare {
- private static final Log LOG = LogFactory.getLog(ForeignExceptionDispatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ForeignExceptionDispatcher.class);
protected final String name;
protected final List<ForeignExceptionListener> listeners = new ArrayList<>();
private ForeignException exception;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java
index 294e108ebd..36182d677d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.errorhandling;
import java.util.Timer;
import java.util.TimerTask;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
/**
@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.Private
public class TimeoutExceptionInjector {
- private static final Log LOG = LogFactory.getLog(TimeoutExceptionInjector.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TimeoutExceptionInjector.class);
private final long maxTime;
private volatile boolean complete;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
index 1056c20453..eb94744299 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
@@ -21,14 +21,14 @@ package org.apache.hadoop.hbase.executor;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.htrace.core.Span;
import org.apache.htrace.core.TraceScope;
import org.apache.htrace.core.Tracer;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Abstract base class for all HBase event handlers. Subclasses should
@@ -53,7 +53,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public abstract class EventHandler implements Runnable, Comparable<Runnable> {
- private static final Log LOG = LogFactory.getLog(EventHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EventHandler.class);
// type of event this object represents
protected EventType eventType;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
index 7117d360d1..4cd800c178 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
@@ -32,9 +32,9 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
@@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
*/
@InterfaceAudience.Private
public class ExecutorService {
- private static final Log LOG = LogFactory.getLog(ExecutorService.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExecutorService.class);
// hold the all the executors created in a map addressable by their names
private final ConcurrentHashMap<String, Executor> executorMap = new ConcurrentHashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java
index 136453a99c..74f7c69c41 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java
@@ -32,8 +32,6 @@ import java.util.Random;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -52,6 +50,9 @@ import org.apache.hadoop.hbase.master.RackManager;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -67,7 +68,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNode
*/
@InterfaceAudience.Private
public class FavoredNodeAssignmentHelper {
- private static final Log LOG = LogFactory.getLog(FavoredNodeAssignmentHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FavoredNodeAssignmentHelper.class);
private RackManager rackManager;
private Map<String, List<ServerName>> rackToRegionServerMap;
private List<String> uniqueRackList;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
index 68e5e897d3..a2cfa8543a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java
@@ -29,8 +29,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -46,7 +44,8 @@ import org.apache.hadoop.hbase.master.SnapshotOfRegionAssignmentFromMeta;
import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class FavoredNodeLoadBalancer extends BaseLoadBalancer implements FavoredNodesPromoter {
- private static final Log LOG = LogFactory.getLog(FavoredNodeLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FavoredNodeLoadBalancer.class);
private RackManager rackManager;
private Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java
index 7705b3d1b7..e0b9dc5b5d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java
@@ -32,8 +32,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.ServerName;
@@ -45,7 +43,8 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.net.NetUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
@@ -64,7 +63,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@InterfaceAudience.Private
public class FavoredNodesManager {
- private static final Log LOG = LogFactory.getLog(FavoredNodesManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FavoredNodesManager.class);
private FavoredNodesPlan globalFavoredNodesAssignmentPlan;
private Map<ServerName, List<RegionInfo>> primaryRSToRegionMap;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
index c48d9d679f..0723f855af 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
@@ -31,8 +31,6 @@ import java.lang.reflect.Proxy;
import java.lang.reflect.UndeclaredThrowableException;
import java.net.URI;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -54,6 +52,8 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.Nullable;
@@ -63,7 +63,7 @@ import edu.umd.cs.findbugs.annotations.Nullable;
* separate filesystem objects for reading and writing hfiles and wals.
*/
public class HFileSystem extends FilterFileSystem {
- public static final Log LOG = LogFactory.getLog(HFileSystem.class);
+ public static final Logger LOG = LoggerFactory.getLogger(HFileSystem.class);
private final FileSystem noChecksumFs; // read hfile data from storage
private final boolean useHBaseChecksum;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
index cd3843f12a..faeac788ae 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
@@ -24,12 +24,12 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class FSDataInputStreamWrapper implements Closeable {
- private static final Log LOG = LogFactory.getLog(FSDataInputStreamWrapper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSDataInputStreamWrapper.class);
private static final boolean isLogTraceEnabled = LOG.isTraceEnabled();
private final HFileSystem hfs;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
index 52597b8297..42f3483648 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
@@ -26,9 +26,9 @@ import java.io.InputStream;
import java.io.FileNotFoundException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.CanSetDropBehind;
import org.apache.hadoop.fs.CanSetReadahead;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -92,7 +92,7 @@ import org.apache.hadoop.ipc.RemoteException;
*/
@InterfaceAudience.Private
public class FileLink {
- private static final Log LOG = LogFactory.getLog(FileLink.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FileLink.class);
/** Define the Back-reference directory name prefix: .links-<hfile>/ */
public static final String BACK_REFERENCES_DIRECTORY_PREFIX = ".links-";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java
index ee1038608b..2aebdf0d04 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* HFileLink describes a link to an hfile.
@@ -58,7 +58,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="EQ_DOESNT_OVERRIDE_EQUALS",
justification="To be fixed but warning suppressed for now")
public class HFileLink extends FileLink {
- private static final Log LOG = LogFactory.getLog(HFileLink.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileLink.class);
/**
* A non-capture group, for HFileLink, so that this can be embedded.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
index 1dfffd6cff..80207eb73e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
@@ -23,9 +23,9 @@ import java.nio.ByteBuffer;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@InterfaceAudience.Private
public class HalfStoreFileReader extends StoreFileReader {
- private static final Log LOG = LogFactory.getLog(HalfStoreFileReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HalfStoreFileReader.class);
final boolean top;
// This is the key we split around. Its the first possible entry on a row:
// i.e. empty column and a timestamp of LATEST_TIMESTAMP.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
index a7c26e0077..41057f1741 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
@@ -38,8 +38,6 @@ import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.crypto.Encryptor;
@@ -85,7 +83,8 @@ import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.DataChecksum;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.hbase.shaded.io.netty.bootstrap.Bootstrap;
@@ -116,8 +115,8 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise;
*/
@InterfaceAudience.Private
public final class FanOutOneBlockAsyncDFSOutputHelper {
-
- private static final Log LOG = LogFactory.getLog(FanOutOneBlockAsyncDFSOutputHelper.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputHelper.class);
private FanOutOneBlockAsyncDFSOutputHelper() {
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java
index 458df27ac2..20c8da742c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java
@@ -74,8 +74,6 @@ import javax.security.sasl.SaslException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CipherOption;
import org.apache.hadoop.crypto.CipherSuite;
@@ -85,6 +83,9 @@ import org.apache.hadoop.crypto.Encryptor;
import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
import org.apache.hadoop.fs.FileEncryptionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import com.google.protobuf.ByteString;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -107,8 +108,8 @@ import org.apache.hadoop.security.token.Token;
*/
@InterfaceAudience.Private
public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
-
- private static final Log LOG = LogFactory.getLog(FanOutOneBlockAsyncDFSOutputSaslHelper.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputSaslHelper.class);
private FanOutOneBlockAsyncDFSOutputSaslHelper() {
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index a071fbdc89..242463cd65 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -22,11 +22,11 @@ import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class CacheConfig {
- private static final Log LOG = LogFactory.getLog(CacheConfig.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(CacheConfig.class.getName());
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
index 8342788ea4..5eb182640f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
@@ -21,10 +21,10 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.util.DataChecksum;
@@ -33,7 +33,7 @@ import org.apache.hadoop.util.DataChecksum;
*/
@InterfaceAudience.Private
public class ChecksumUtil {
- public static final Log LOG = LogFactory.getLog(ChecksumUtil.class);
+ public static final Logger LOG = LoggerFactory.getLogger(ChecksumUtil.class);
/** This is used to reserve space in a byte buffer */
private static byte[] DUMMY_VALUE = new byte[128 * HFileBlock.CHECKSUM_SIZE];
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java
index 7e5db088ff..0b58b21561 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java
@@ -23,14 +23,14 @@ import java.io.IOException;
import java.util.LinkedList;
import java.util.Queue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.BloomFilterChunk;
import org.apache.hadoop.hbase.util.BloomFilterUtil;
@@ -47,8 +47,8 @@ import org.apache.hadoop.io.Writable;
public class CompoundBloomFilterWriter extends CompoundBloomFilterBase
implements BloomFilterWriter, InlineBlockWriter {
- private static final Log LOG =
- LogFactory.getLog(CompoundBloomFilterWriter.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(CompoundBloomFilterWriter.class);
/** The current chunk being written to */
private BloomFilterChunk chunk;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
index d63c120212..e0c2c796b4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
@@ -38,8 +38,6 @@ import java.util.TreeMap;
import java.util.concurrent.atomic.LongAdder;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -51,6 +49,8 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.MetricsIO;
@@ -140,7 +140,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Private
public class HFile {
// LOG is being used in HFileBlock and CheckSumUtil
- static final Log LOG = LogFactory.getLog(HFile.class);
+ static final Logger LOG = LoggerFactory.getLogger(HFile.class);
/**
* Maximum length of key in HFile.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index c6c7446f3f..39ba6cd6d3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -27,14 +27,14 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.io.ByteBuffInputStream;
@@ -110,7 +110,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
*/
@InterfaceAudience.Private
public class HFileBlock implements Cacheable {
- private static final Log LOG = LogFactory.getLog(HFileBlock.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);
// Block Header fields.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
index 557a69c6b0..7b8815f625 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
@@ -30,8 +30,6 @@ import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.hbase.ByteBufferKeyOnlyKeyValue;
@@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KeyOnlyKeyValue;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFile.CachingBlockReader;
@@ -68,7 +68,7 @@ import org.apache.hadoop.util.StringUtils;
@InterfaceAudience.Private
public class HFileBlockIndex {
- private static final Log LOG = LogFactory.getLog(HFileBlockIndex.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileBlockIndex.class);
static final int DEFAULT_MAX_CHUNK_SIZE = 128 * 1024;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
index 5aea107cdc..639130db00 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
@@ -46,8 +46,6 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -80,6 +78,8 @@ import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.Counter;
@@ -99,7 +99,7 @@ import com.codahale.metrics.Timer;
@InterfaceStability.Evolving
public class HFilePrettyPrinter extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(HFilePrettyPrinter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFilePrettyPrinter.class);
private Options options = new Options();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
index 5021b4d1de..22e38bf58e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
@@ -26,8 +26,6 @@ import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.SizeCachedKeyValue;
import org.apache.hadoop.hbase.SizeCachedNoTagsKeyValue;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.compress.Compression;
@@ -74,7 +74,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
// one file. Ditto for all the HFileReader.ScannerV? implementations. I was running up against
// the MaxInlineLevel limit because too many tiers involved reading from an hfile. Was also hard
// to navigate the source code when so many classes participating in read.
- private static final Log LOG = LogFactory.getLog(HFileReaderImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileReaderImpl.class);
/** Data block index reader keeping the root data index in memory */
private HFileBlockIndex.CellBasedKeyBlockIndexReader dataBlockIndexReader;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
index 8c631ebb31..50d5ddc66e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
@@ -26,8 +26,6 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.CellComparatorImpl.MetaCellComparator;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -60,7 +60,7 @@ import org.apache.hadoop.io.Writable;
*/
@InterfaceAudience.Private
public class HFileWriterImpl implements HFile.Writer {
- private static final Log LOG = LogFactory.getLog(HFileWriterImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileWriterImpl.class);
private static final long UNSET = -1;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
index 3733535df1..43238d930f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
@@ -34,10 +34,10 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;
@@ -99,7 +99,7 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties({"encodingCountsForTest"})
public class LruBlockCache implements ResizableBlockCache, HeapSize {
- private static final Log LOG = LogFactory.getLog(LruBlockCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LruBlockCache.class);
/**
* Percentage of total size that eviction will evict until; e.g. if set to .8, then we will keep
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
index 838fa41889..ce8d53338e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
@@ -29,17 +29,16 @@ import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class PrefetchExecutor {
- private static final Log LOG = LogFactory.getLog(PrefetchExecutor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PrefetchExecutor.class);
/** Futures for tracking block prefetch activity */
private static final Map<Path, Future<?>> prefetchFutures = new ConcurrentSkipListMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
index 40b64be18d..8586967d86 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
@@ -31,9 +31,9 @@ import java.util.concurrent.atomic.LongAdder;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue;
import org.apache.commons.collections4.map.LinkedMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry;
@@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Ints;
@InterfaceAudience.Private
@JsonIgnoreProperties({"indexStatistics", "freeSize", "usedSize"})
public final class BucketAllocator {
- private static final Log LOG = LogFactory.getLog(BucketAllocator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BucketAllocator.class);
@JsonIgnoreProperties({"completelyFree", "uninstantiated"})
public final static class Bucket {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 0ced7c1caa..ee6eca4bf8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -54,11 +54,12 @@ import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import com.google.common.base.Preconditions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
@@ -102,7 +103,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
*/
@InterfaceAudience.Private
public class BucketCache implements BlockCache, HeapSize {
- private static final Log LOG = LogFactory.getLog(BucketCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BucketCache.class);
/** Priority buckets config */
static final String SINGLE_FACTOR_CONFIG_NAME = "hbase.bucketcache.single.factor";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index ad1c394d7c..9c19c8846c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -28,9 +28,10 @@ import java.nio.channels.FileChannel;
import java.util.Arrays;
import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
@@ -44,7 +45,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Private
public class FileIOEngine implements IOEngine {
- private static final Log LOG = LogFactory.getLog(FileIOEngine.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FileIOEngine.class);
public static final String FILE_DELIMITER = ",";
private final String[] filePaths;
private final FileChannel[] fileChannels;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
index 4fe39d38ae..e2f019114b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java
@@ -23,9 +23,9 @@ import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
@@ -41,7 +41,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Private
public class FileMmapEngine implements IOEngine {
- static final Log LOG = LogFactory.getLog(FileMmapEngine.class);
+ static final Logger LOG = LoggerFactory.getLogger(FileMmapEngine.class);
private final String path;
private long size;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java
index 1f2025278d..471eb469b7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java
@@ -21,11 +21,11 @@ import java.lang.management.ManagementFactory;
import java.lang.management.MemoryType;
import java.lang.management.MemoryUsage;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
import org.apache.hadoop.hbase.util.Pair;
@@ -51,7 +51,7 @@ public class MemorySizeUtil {
// Default lower water mark limit is 95% size of memstore size.
public static final float DEFAULT_MEMSTORE_SIZE_LOWER_LIMIT = 0.95f;
- private static final Log LOG = LogFactory.getLog(MemorySizeUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MemorySizeUtil.class);
// a constant to convert a fraction to a percentage
private static final int CONVERT_TO_PERCENTAGE = 100;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
index 679f237721..d36b468a9e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
@@ -17,18 +17,19 @@
*/
package org.apache.hadoop.hbase.ipc;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
-import org.apache.hadoop.hbase.shaded.io.netty.util.internal.StringUtil;
-
import java.io.IOException;
+import java.util.HashMap;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.HashMap;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.DaemonThreadFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.hbase.shaded.io.netty.util.internal.StringUtil;
/**
* A very simple {@code }RpcScheduler} that serves incoming requests in order.
@@ -36,7 +37,7 @@ import java.util.HashMap;
* This can be used for HMaster, where no prioritization is needed.
*/
public class FifoRpcScheduler extends RpcScheduler {
- private static final Log LOG = LogFactory.getLog(FifoRpcScheduler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FifoRpcScheduler.class);
private final int handlerCount;
private final int maxQueueLength;
private final AtomicInteger queueSize = new AtomicInteger(0);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java
index 91c468f197..7ac0d14e84 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java
@@ -38,13 +38,13 @@ import java.net.InetSocketAddress;
import java.util.List;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.Server;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.security.HBasePolicyProvider;
@@ -63,7 +63,7 @@ import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.CONFIG})
public class NettyRpcServer extends RpcServer {
- public static final Log LOG = LogFactory.getLog(NettyRpcServer.class);
+ public static final Logger LOG = LoggerFactory.getLogger(NettyRpcServer.class);
private final InetSocketAddress bindAddress;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
index 86537c0b01..a8a7fe0da4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
@@ -22,13 +22,13 @@ package org.apache.hadoop.hbase.ipc;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
@InterfaceStability.Evolving
public class RWQueueRpcExecutor extends RpcExecutor {
- private static final Log LOG = LogFactory.getLog(RWQueueRpcExecutor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RWQueueRpcExecutor.class);
public static final String CALL_QUEUE_READ_SHARE_CONF_KEY =
"hbase.ipc.server.callqueue.read.ratio";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java
index 445a460c75..d79416db0f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java
@@ -30,13 +30,13 @@ import java.util.concurrent.atomic.LongAdder;
import java.util.Map;
import java.util.HashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.shaded.io.netty.util.internal.StringUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
import org.apache.hadoop.hbase.util.BoundedPriorityBlockingQueue;
import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings;
*/
@InterfaceAudience.Private
public abstract class RpcExecutor {
- private static final Log LOG = LogFactory.getLog(RpcExecutor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RpcExecutor.class);
protected static final int DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT = 250;
public static final String CALL_QUEUE_HANDLER_FACTOR_CONF_KEY = "hbase.ipc.server.callqueue.handler.factor";
@@ -151,7 +151,7 @@ public abstract class RpcExecutor {
}
protected int computeNumCallQueues(final int handlerCount, final float callQueuesHandlersFactor) {
- return Math.max(1, (int) Math.round(handlerCount * callQueuesHandlersFactor));
+ return Math.max(1, Math.round(handlerCount * callQueuesHandlersFactor));
}
public Map getCallQueueCountsSummary() {
@@ -204,8 +204,7 @@ public abstract class RpcExecutor {
queueInitArgs[0] = Math.max((int) queueInitArgs[0], DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT);
}
for (int i = 0; i < numQueues; ++i) {
- queues
- .add((BlockingQueue) ReflectionUtils.newInstance(queueClass, queueInitArgs));
+ queues.add(ReflectionUtils.newInstance(queueClass, queueInitArgs));
}
}
@@ -308,7 +307,7 @@ public abstract class RpcExecutor {
}
}
} catch (Exception e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
throw e;
} finally {
if (interrupted) {
@@ -385,6 +384,7 @@ public abstract class RpcExecutor {
this.queueSize = queueSize;
}
+ @Override
public int getNextQueue() {
return ThreadLocalRandom.current().nextInt(queueSize);
}
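
Aside, not part of the patch: the RpcExecutor hunk above also rewrites LOG.warn(e) as LOG.warn(e.toString(), e). commons-logging accepted an arbitrary Object as the message, but SLF4J's warn() takes a String, so the exception has to be passed explicitly as the Throwable argument, which also puts the stack trace in the log. A minimal sketch of the pattern, using an illustrative class name:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative class; only the logging calls mirror the patch.
    public class Slf4jWarnSketch {
      private static final Logger LOG = LoggerFactory.getLogger(Slf4jWarnSketch.class);

      void handle(Exception e) {
        // commons-logging allowed LOG.warn(e), which logged only e.toString().
        // SLF4J needs a String message; the Throwable as second argument also
        // emits the stack trace.
        LOG.warn(e.toString(), e);
      }
    }
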
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index 43af98836a..2a00c3d15f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -35,9 +35,6 @@ import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.LongAdder;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CallQueueTooBigException;
import org.apache.hadoop.hbase.CellScanner;
@@ -66,6 +63,8 @@ import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
@@ -77,6 +76,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
/**
* An RPC server that hosts protobuf described Services.
*
@@ -85,7 +86,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHea
public abstract class RpcServer implements RpcServerInterface,
ConfigurationObserver {
// LOG is being used in CallRunner and the log level is being changed in tests
- public static final Log LOG = LogFactory.getLog(RpcServer.class);
+ public static final Logger LOG = LoggerFactory.getLogger(RpcServer.class);
protected static final CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
= new CallQueueTooBigException();
@@ -109,7 +110,7 @@ public abstract class RpcServer implements RpcServerInterface,
protected static final String AUTH_FAILED_FOR = "Auth failed for ";
protected static final String AUTH_SUCCESSFUL_FOR = "Auth successful for ";
- protected static final Log AUDITLOG = LogFactory.getLog("SecurityLogger."
+ protected static final Logger AUDITLOG = LoggerFactory.getLogger("SecurityLogger."
+ Server.class.getName());
protected SecretManager<TokenIdentifier> secretManager;
protected final Map<String, String> saslProps;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java
index b1b047d711..fcd11f5bc7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java
@@ -21,11 +21,11 @@ import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Server;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor;
import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
@InterfaceAudience.Private
public class RpcServerFactory {
- public static final Log LOG = LogFactory.getLog(RpcServerFactory.class);
+ public static final Logger LOG = LoggerFactory.getLogger(RpcServerFactory.class);
public static final String CUSTOM_RPC_SERVER_IMPL_CONF_KEY = "hbase.rpc.server.impl";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java
index ce9f290c56..62073db863 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Handles everything on master-side related to master election.
@@ -52,7 +52,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class ActiveMasterManager extends ZKListener {
- private static final Log LOG = LogFactory.getLog(ActiveMasterManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ActiveMasterManager.class);
final AtomicBoolean clusterHasActiveMaster = new AtomicBoolean(false);
final AtomicBoolean clusterShutDown = new AtomicBoolean(false);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java
index ccbfadc38b..5c084bf4eb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java
@@ -26,14 +26,14 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper;
import org.apache.hadoop.hbase.favored.FavoredNodesPlan;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Helper class that is used by {@link RegionPlacementMaintainer} to print
* information for favored nodes
@@ -41,7 +41,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class AssignmentVerificationReport {
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
AssignmentVerificationReport.class.getName());
private TableName tableName = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
index d3ba231b15..23912d67c8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
@@ -27,8 +27,6 @@ import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
@@ -53,6 +51,8 @@ import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.util.Triple;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A janitor for the catalog tables. Scans the hbase:meta catalog
@@ -60,7 +60,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class CatalogJanitor extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(CatalogJanitor.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(CatalogJanitor.class.getName());
private final AtomicBoolean alreadyRunning = new AtomicBoolean(false);
private final AtomicBoolean enabled = new AtomicBoolean(true);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java
index df5444ad4f..db04c606a9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java
@@ -18,9 +18,9 @@
*/
package org.apache.hadoop.hbase.master;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
@@ -42,7 +42,7 @@ import java.util.Set;
*/
@InterfaceAudience.Private
public class DeadServer {
- private static final Log LOG = LogFactory.getLog(DeadServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DeadServer.class);
/**
* Set of known dead servers. On znode expiration, servers are added here.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java
index 81a8b55385..a9e579629c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java
@@ -22,8 +22,6 @@ import java.util.List;
import java.util.NavigableSet;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -31,6 +29,8 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.ServerName;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tracks the list of draining region servers via ZK.
@@ -51,7 +51,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class DrainingServerTracker extends ZKListener {
- private static final Log LOG = LogFactory.getLog(DrainingServerTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DrainingServerTracker.class);
private ServerManager serverManager;
private final NavigableSet<ServerName> drainingServers = new TreeSet<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
index b1d83791c6..d37a80a1c3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
@@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.master;
import java.util.Map;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.master.locking.LockManager;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.procedure2.LockType;
@InterfaceAudience.Private
public class ExpiredMobFileCleanerChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(ExpiredMobFileCleanerChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExpiredMobFileCleanerChore.class);
private final HMaster master;
private ExpiredMobFileCleaner cleaner;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 262dfa2375..e31db820cb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -39,6 +39,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
@@ -50,8 +51,6 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ClusterStatus;
@@ -91,6 +90,7 @@ import org.apache.hadoop.hbase.http.InfoServer;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
import org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
@@ -188,7 +188,8 @@ import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.webapp.WebAppContext;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
@@ -221,7 +222,7 @@ import com.google.protobuf.Service;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
@SuppressWarnings("deprecation")
public class HMaster extends HRegionServer implements MasterServices {
- private static final Log LOG = LogFactory.getLog(HMaster.class.getName());
+ private static Logger LOG = LoggerFactory.getLogger(HMaster.class.getName());
/**
* Protection against zombie master. Started once Master accepts active responsibility and
@@ -607,6 +608,7 @@ public class HMaster extends HRegionServer implements MasterServices {
return connector.getLocalPort();
}
+ @Override
protected Function<TableDescriptorBuilder, TableDescriptorBuilder> getMetaTableObserver() {
return builder -> builder.setRegionReplication(conf.getInt(HConstants.META_REPLICAS_NUM, HConstants.DEFAULT_META_REPLICA_NUM));
}
@@ -818,7 +820,7 @@ public class HMaster extends HRegionServer implements MasterServices {
// Wait for region servers to report in
String statusStr = "Wait for region servers to report in";
status.setStatus(statusStr);
- LOG.info(status);
+ LOG.info(Objects.toString(status));
waitForRegionServers(status);
if (this.balancer instanceof FavoredNodesPromoter) {
@@ -1528,6 +1530,7 @@ public class HMaster extends HRegionServer implements MasterServices {
/**
* @return Client info for use as prefix on an audit log string; who did an action
*/
+ @Override
public String getClientIdAuditPrefix() {
return "Client=" + RpcServer.getRequestUserName().orElse(null)
+ "/" + RpcServer.getRemoteAddress().orElse(null);
@@ -2017,7 +2020,7 @@ public class HMaster extends HRegionServer implements MasterServices {
}
} catch (Throwable t) {
status.setStatus("Failed to become active: " + t.getMessage());
- LOG.fatal("Failed to become active master", t);
+ LOG.error(HBaseMarkers.FATAL, "Failed to become active master", t);
// HBASE-5680: Likely hadoop23 vs hadoop 20.x/1.x incompatibility
if (t instanceof NoClassDefFoundError &&
t.getMessage()
@@ -2606,13 +2609,13 @@ public class HMaster extends HRegionServer implements MasterServices {
}
if (cpHost != null) {
// HBASE-4014: dump a list of loaded coprocessors.
- LOG.fatal("Master server abort: loaded coprocessors are: " +
+ LOG.error(HBaseMarkers.FATAL, "Master server abort: loaded coprocessors are: " +
getLoadedCoprocessors());
}
if (t != null) {
- LOG.fatal(msg, t);
+ LOG.error(HBaseMarkers.FATAL, msg, t);
} else {
- LOG.fatal(msg);
+ LOG.error(HBaseMarkers.FATAL, msg);
}
try {
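
SLF4J has no FATAL level, which is why each LOG.fatal(...) call above is rewritten as LOG.error(...) tagged with HBaseMarkers.FATAL. A minimal sketch of the marker idiom; the assumption here is that HBaseMarkers.FATAL is essentially a named SLF4J marker obtained from MarkerFactory (its real definition is not part of this hunk):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    public final class FatalMarkerExample {
      // Assumed stand-in for HBaseMarkers.FATAL: a named SLF4J marker.
      private static final Marker FATAL = MarkerFactory.getMarker("FATAL");
      private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerExample.class);

      static void abort(String msg, Throwable t) {
        // error(Marker, String, Throwable) keeps the stack trace and lets
        // appenders or filters key off the FATAL marker.
        LOG.error(FATAL, msg, t);
      }
    }
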
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
index 093412a47f..3ec70d3b03 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
@@ -26,8 +26,6 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LocalHBaseCluster;
@@ -46,10 +44,12 @@ import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class HMasterCommandLine extends ServerCommandLine {
- private static final Log LOG = LogFactory.getLog(HMasterCommandLine.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HMasterCommandLine.class);
private static final String USAGE =
"Usage: Master [opts] start|stop|clear\n" +
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
index bc262290fc..ee7bcd6e6a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
@@ -23,8 +23,7 @@ import java.util.List;
import java.util.Set;
import com.google.protobuf.Service;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.MetaMutationAnnotation;
@@ -61,6 +60,8 @@ import org.apache.hadoop.hbase.quotas.GlobalQuotaSettings;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.security.User;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides the coprocessor framework and environment for master oriented
@@ -71,7 +72,7 @@ import org.apache.yetus.audience.InterfaceAudience;
public class MasterCoprocessorHost
extends CoprocessorHost {
- private static final Log LOG = LogFactory.getLog(MasterCoprocessorHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterCoprocessorHost.class);
/**
* Coprocessor environment extension providing access to master related
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
index 27987f6bce..8c2c9fdcdc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -39,6 +37,7 @@ import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.fs.HFileSystem;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -47,6 +46,8 @@ import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class abstracts a bunch of operations the HMaster needs to interact with
@@ -55,7 +56,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class MasterFileSystem {
- private static final Log LOG = LogFactory.getLog(MasterFileSystem.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterFileSystem.class);
/** Parameter name for HBase instance root directory permission*/
public static final String HBASE_DIR_PERMS = "hbase.rootdir.perms";
@@ -264,12 +265,13 @@ public class MasterFileSystem {
HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
}
} catch (DeserializationException de) {
- LOG.fatal("Please fix invalid configuration for " + HConstants.HBASE_DIR, de);
+ LOG.error(HBaseMarkers.FATAL, "Please fix invalid configuration for "
+ + HConstants.HBASE_DIR, de);
IOException ioe = new IOException();
ioe.initCause(de);
throw ioe;
} catch (IllegalArgumentException iae) {
- LOG.fatal("Please fix invalid configuration for "
+ LOG.error(HBaseMarkers.FATAL, "Please fix invalid configuration for "
+ HConstants.HBASE_DIR + " " + rd.toString(), iae);
throw iae;
}
@@ -444,7 +446,7 @@ public class MasterFileSystem {
public void stop() {
}
- public void logFileSystemState(Log log) throws IOException {
+ public void logFileSystemState(Logger log) throws IOException {
FSUtils.logFileSystemState(fs, rootdir, log);
}
}
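
The logFileSystemState(Logger) signature change means the facade type now leaks into method signatures: a utility that used to accept a commons-logging Log takes org.slf4j.Logger instead, and callers hand over their own SLF4J logger. A hedged sketch of that calling pattern (the helper below is illustrative; FSUtils.logFileSystemState's full signature is only partly visible in this hunk):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class FsStateDumper {
      private static final Logger LOG = LoggerFactory.getLogger(FsStateDumper.class);

      // Utilities now take the SLF4J Logger type instead of commons-logging Log.
      static void logState(String description, Logger log) {
        log.info("File system state: {}", description);
      }

      void dump() {
        // Callers simply pass their own SLF4J logger through.
        logState("rootdir listing", LOG);
      }
    }
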
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java
index 7abf02cf55..78e7e6e964 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -38,13 +36,15 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Used by the HMaster on startup to split meta logs and assign the meta table.
*/
@InterfaceAudience.Private
public class MasterMetaBootstrap {
- private static final Log LOG = LogFactory.getLog(MasterMetaBootstrap.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterMetaBootstrap.class);
private final MonitoredTask status;
private final HMaster master;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java
index 8677975396..9d6da0c1ff 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java
@@ -27,12 +27,12 @@ import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.master.locking.LockManager;
import org.apache.hadoop.hbase.mob.MobUtils;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
*/
@InterfaceAudience.Private
public class MasterMobCompactionThread {
- static final Log LOG = LogFactory.getLog(MasterMobCompactionThread.class);
+ static final Logger LOG = LoggerFactory.getLogger(MasterMobCompactionThread.class);
private final HMaster master;
private final Configuration conf;
private final ExecutorService mobCompactorPool;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 6044d021c1..4a4bbe1718 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -31,8 +31,6 @@ import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
@@ -94,6 +92,9 @@ import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
@@ -290,7 +291,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
public class MasterRpcServices extends RSRpcServices
implements MasterService.BlockingInterface, RegionServerStatusService.BlockingInterface,
LockService.BlockingInterface {
- private static final Log LOG = LogFactory.getLog(MasterRpcServices.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(MasterRpcServices.class.getName());
private final HMaster master;
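
A few classes here (HMaster, MasterRpcServices, and later SnapshotOfRegionAssignmentFromMeta) call LoggerFactory.getLogger(Foo.class.getName()) rather than getLogger(Foo.class). Both overloads exist in SLF4J and resolve to the same logger name when the string is the fully qualified class name, so the patch simply preserves whichever form the old LogFactory.getLog(...) call used:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LoggerNameExample {
      // getLogger(Class) is equivalent to getLogger(clazz.getName()).
      private static final Logger BY_CLASS = LoggerFactory.getLogger(LoggerNameExample.class);
      private static final Logger BY_NAME  = LoggerFactory.getLogger(LoggerNameExample.class.getName());

      static void demo() {
        // Both loggers report the same name, so they are configured identically.
        System.out.println(BY_CLASS.getName().equals(BY_NAME.getName())); // true
      }
    }
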
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java
index 891ea9b49c..52ba0991aa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java
@@ -27,8 +27,6 @@ import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class MasterWalManager {
- private static final Log LOG = LogFactory.getLog(MasterWalManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterWalManager.class);
final static PathFilter META_FILTER = new PathFilter() {
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java
index c2c37bdd29..83a6988006 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java
@@ -18,10 +18,10 @@
package org.apache.hadoop.hbase.master;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.metrics.Counter;
import org.apache.hadoop.hbase.metrics.Histogram;
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
@InterfaceStability.Evolving
@InterfaceAudience.Private
public class MetricsMaster {
- private static final Log LOG = LogFactory.getLog(MetricsMaster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsMaster.class);
private MetricsMasterSource masterSource;
private MetricsMasterProcSource masterProcSource;
private MetricsMasterQuotaSource masterQuotaSource;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
index 2e0e44c470..8a7c4e1bf8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
@@ -22,11 +22,11 @@ import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableState;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.procedure2.LockType;
@InterfaceAudience.Private
public class MobCompactionChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(MobCompactionChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MobCompactionChore.class);
private HMaster master;
private ExecutorService pool;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java
index 58acf83c9f..447c6a6d8e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java
@@ -22,9 +22,9 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -37,7 +37,7 @@ import org.apache.hadoop.net.ScriptBasedMapping;
*/
@InterfaceAudience.Private
public class RackManager {
- private static final Log LOG = LogFactory.getLog(RackManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RackManager.class);
public static final String UNKNOWN_RACK = "Unknown Rack";
private DNSToSwitchMapping switchMapping;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java
index afd402b190..370f1f26f7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java
@@ -38,8 +38,6 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.ClusterStatus.Option;
@@ -57,10 +55,9 @@ import org.apache.hadoop.hbase.favored.FavoredNodesPlan;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.MunkresAssignment;
import org.apache.hadoop.hbase.util.Pair;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
@@ -74,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavor
@InterfaceAudience.Private
// TODO: Remove? Unused. Partially implemented only.
public class RegionPlacementMaintainer {
- private static final Log LOG = LogFactory.getLog(RegionPlacementMaintainer.class
+ private static final Logger LOG = LoggerFactory.getLogger(RegionPlacementMaintainer.class
.getName());
//The cost of a placement that should never be assigned.
private static final float MAX_COST = Float.POSITIVE_INFINITY;
@@ -980,12 +977,6 @@ public class RegionPlacementMaintainer {
opt.addOption("ld", "locality-dispersion", false, "print locality and dispersion " +
"information for current plan");
try {
- // Set the log4j
- Logger.getLogger("org.apache.zookeeper").setLevel(Level.ERROR);
- Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.ERROR);
- Logger.getLogger("org.apache.hadoop.hbase.master.RegionPlacementMaintainer")
- .setLevel(Level.INFO);
-
CommandLine cmd = new GnuParser().parse(opt, args);
Configuration conf = HBaseConfiguration.create();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java
index 0e9351d226..2f2d536ab5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java
@@ -25,8 +25,6 @@ import java.util.List;
import java.util.NavigableMap;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
@@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tracks the online region servers via ZK.
@@ -49,7 +49,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class RegionServerTracker extends ZKListener {
- private static final Log LOG = LogFactory.getLog(RegionServerTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionServerTracker.class);
private NavigableMap<ServerName, RegionServerInfo> regionServers = new TreeMap<>();
private ServerManager serverManager;
private MasterServices server;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
index b86315be7a..923a0a7ae7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
@@ -38,8 +38,6 @@ import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Predicate;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClockOutOfSyncException;
import org.apache.hadoop.hbase.HConstants;
@@ -61,7 +59,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -109,7 +108,7 @@ public class ServerManager {
public static final String WAIT_ON_REGIONSERVERS_INTERVAL =
"hbase.master.wait.on.regionservers.interval";
- private static final Log LOG = LogFactory.getLog(ServerManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ServerManager.class);
// Set if we are to shutdown the cluster.
private AtomicBoolean clusterShutdown = new AtomicBoolean(false);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java
index 5d889a4f73..9031577584 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java
@@ -32,8 +32,6 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -47,6 +45,8 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper;
import org.apache.hadoop.hbase.favored.FavoredNodesPlan;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Used internally for reading meta and constructing datastructures that are
@@ -56,7 +56,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class SnapshotOfRegionAssignmentFromMeta {
- private static final Log LOG = LogFactory.getLog(SnapshotOfRegionAssignmentFromMeta.class
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotOfRegionAssignmentFromMeta.class
.getName());
private final Connection connection;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
index 8ab087d814..97fa7c8f5f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
@@ -36,8 +36,6 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -50,13 +48,15 @@ import org.apache.hadoop.hbase.SplitLogCounters;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.coordination.SplitLogManagerCoordination;
import org.apache.hadoop.hbase.coordination.SplitLogManagerCoordination.SplitLogManagerDetails;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -90,7 +90,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class SplitLogManager {
- private static final Log LOG = LogFactory.getLog(SplitLogManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SplitLogManager.class);
private final MasterServices server;
@@ -415,7 +415,7 @@ public class SplitLogManager {
batch.installed++;
return null;
}
- LOG.fatal("Logic error. Deleted task still present in tasks map");
+ LOG.error(HBaseMarkers.FATAL, "Logic error. Deleted task still present in tasks map");
assert false : "Deleted task still present in tasks map";
return t;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java
index 174272e3f4..dbf6d394b6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java
@@ -23,8 +23,6 @@ import java.io.InterruptedIOException;
import java.util.NavigableSet;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -55,6 +53,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This is a helper class used internally to manage the namespace metadata that is stored in
@@ -69,7 +69,7 @@ import org.apache.yetus.audience.InterfaceAudience;
justification="TODO: synchronize access on nsTable but it is done in tiers above and this " +
"class is going away/shrinking")
public class TableNamespaceManager {
- private static final Log LOG = LogFactory.getLog(TableNamespaceManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableNamespaceManager.class);
private Configuration conf;
private MasterServices masterServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
index 330b752b5c..ad8908ac96 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
@@ -28,13 +28,14 @@ import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.TableState;
@@ -46,7 +47,7 @@ import org.apache.hadoop.hbase.client.TableState;
*/
@InterfaceAudience.Private
public class TableStateManager {
- private static final Log LOG = LogFactory.getLog(TableStateManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableStateManager.class);
private final ReadWriteLock lock = new ReentrantReadWriteLock();
private final MasterServices master;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
index 770d8a49d4..801caf5eac 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
@@ -22,8 +22,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
import java.util.Comparator;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AssignRegionStateData;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState;
@@ -72,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
*/
@InterfaceAudience.Private
public class AssignProcedure extends RegionTransitionProcedure {
- private static final Log LOG = LogFactory.getLog(AssignProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AssignProcedure.class);
/**
* Set to true when we need recalibrate -- choose a new target -- because original assign failed.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
index 5daf96d8c7..fac51f037e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
@@ -36,8 +36,6 @@ import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HConstants;
@@ -92,6 +90,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The AssignmentManager is the coordinator for region assign/unassign operations.
@@ -106,7 +106,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class AssignmentManager implements ServerListener {
- private static final Log LOG = LogFactory.getLog(AssignmentManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AssignmentManager.class);
// TODO: AMv2
// - handle region migration from hbase1 to hbase2.
@@ -510,7 +510,7 @@ public class AssignmentManager implements ServerListener {
}
}
} catch (Throwable t) {
- LOG.error(t);
+ LOG.error(t.toString(), t);
}
}).start();
}
@@ -748,7 +748,7 @@ public class AssignmentManager implements ServerListener {
plan.setDestination(getBalancer().randomAssignment(plan.getRegionInfo(),
this.master.getServerManager().createDestinationServersList(exclude)));
} catch (HBaseIOException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
}
}
}
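
Unlike commons-logging, the SLF4J Logger has no error(Object) or warn(Object) overloads, so LOG.error(t) and LOG.warn(e) no longer compile; the rewrite LOG.error(t.toString(), t) keeps both a message and the stack trace. A small sketch of the pattern:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ThrowableLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(ThrowableLoggingExample.class);

      void run() {
        try {
          doWork();
        } catch (Throwable t) {
          // SLF4J needs a String message; passing the Throwable as the last
          // argument preserves the full stack trace in the output.
          LOG.error(t.toString(), t);
        }
      }

      private void doWork() {
        throw new IllegalStateException("illustrative failure");
      }
    }
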
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java
index 37521cc67f..610003df91 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -30,7 +28,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.GCMergedRegionsState;
@@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.G
@InterfaceAudience.Private
public class GCMergedRegionsProcedure
extends AbstractStateMachineTableProcedure<GCMergedRegionsState> {
- private static final Log LOG = LogFactory.getLog(GCMergedRegionsProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(GCMergedRegionsProcedure.class);
private RegionInfo father;
private RegionInfo mother;
private RegionInfo mergedChild;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java
index 805b870b38..2b433484c6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.backup.HFileArchiver;
@@ -33,7 +31,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -47,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.G
*/
@InterfaceAudience.Private
public class GCRegionProcedure extends AbstractStateMachineRegionProcedure<GCRegionState> {
- private static final Log LOG = LogFactory.getLog(GCRegionProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(GCRegionProcedure.class);
public GCRegionProcedure(final MasterProcedureEnv env, final RegionInfo hri) {
super(env, hri);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
index 0d107f2a9d..6b2d54e1a6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
@@ -24,8 +24,6 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -64,7 +62,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
@@ -79,7 +78,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M
@InterfaceAudience.Private
public class MergeTableRegionsProcedure
extends AbstractStateMachineTableProcedure<MergeTableRegionsState> {
- private static final Log LOG = LogFactory.getLog(MergeTableRegionsProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MergeTableRegionsProcedure.class);
private Boolean traceEnabled;
private volatile boolean lock = false;
private ServerName regionLocation;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
index 5940f2fe31..a29bfee2cb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -31,7 +29,8 @@ import org.apache.hadoop.hbase.master.procedure.AbstractStateMachineRegionProced
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStateData;
@@ -44,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M
*/
@InterfaceAudience.Private
public class MoveRegionProcedure extends AbstractStateMachineRegionProcedure<MoveRegionState> {
- private static final Log LOG = LogFactory.getLog(MoveRegionProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MoveRegionProcedure.class);
private RegionPlan plan;
public MoveRegionProcedure() {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java
index 079dbd5e77..a45c9e92e7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java
@@ -24,8 +24,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -49,15 +47,18 @@ import org.apache.hadoop.hbase.util.MultiHConnection;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+
/**
* Store Region State to hbase:meta table.
*/
@InterfaceAudience.Private
public class RegionStateStore {
- private static final Log LOG = LogFactory.getLog(RegionStateStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionStateStore.class);
/** The delimiter for meta columns for replicaIds > 0 */
protected static final char META_REPLICA_ID_DELIMITER = '_';
@@ -198,7 +199,7 @@ public class RegionStateStore {
.setType(DataType.Put)
.setValue(Bytes.toBytes(state.name()))
.build());
- LOG.info(info);
+ LOG.info(info.toString());
final boolean serialReplication = hasSerialReplicationScope(regionInfo.getTable());
if (serialReplication && state == State.OPEN) {
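
The LOG.info(info.toString()) change above (like the Objects.toString(status) call in the HMaster hunk) exists for the same reason: SLF4J's info(...) takes a String or a format string, not an arbitrary Object. Where the argument may be null, java.util.Objects.toString is the null-safe option; parameterized logging is the other common idiom:

    import java.util.Objects;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class MessageFormattingExample {
      private static final Logger LOG = LoggerFactory.getLogger(MessageFormattingExample.class);

      void report(Object status) {
        // Null-safe conversion: logs "null" instead of throwing if status is null.
        LOG.info(Objects.toString(status));

        // Equivalent parameterized form; the {} placeholder is only rendered
        // when INFO is enabled, avoiding the toString() cost otherwise.
        LOG.info("status: {}", status);
      }
    }
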
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
index e9468734a2..c43760474b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
@@ -34,8 +34,6 @@ import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -47,7 +45,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class RegionStates {
- private static final Log LOG = LogFactory.getLog(RegionStates.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionStates.class);
protected static final State[] STATES_EXPECTED_ON_OPEN = new State[] {
State.OFFLINE, State.CLOSED, // disable/offline
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
index 17ba75a07c..fbd6946954 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -22,8 +22,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -36,11 +34,11 @@ import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation;
import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteProcedure;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Base class for the Assign and Unassign Procedure.
@@ -89,7 +87,7 @@ public abstract class RegionTransitionProcedure
extends Procedure<MasterProcedureEnv>
implements TableProcedureInterface,
RemoteProcedure {
- private static final Log LOG = LogFactory.getLog(RegionTransitionProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionTransitionProcedure.class);
protected final AtomicBoolean aborted = new AtomicBoolean(false);
@@ -163,6 +161,7 @@ public abstract class RegionTransitionProcedure
protected abstract void reportTransition(MasterProcedureEnv env,
RegionStateNode regionNode, TransitionCode code, long seqId) throws UnexpectedStateException;
+ @Override
public abstract RemoteOperation remoteCallBuild(MasterProcedureEnv env, ServerName serverName);
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
index 69024340a4..809d6f3d24 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
@@ -33,8 +33,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -72,7 +70,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
@@ -87,7 +86,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.S
@InterfaceAudience.Private
public class SplitTableRegionProcedure
extends AbstractStateMachineRegionProcedure<SplitTableRegionState> {
- private static final Log LOG = LogFactory.getLog(SplitTableRegionProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SplitTableRegionProcedure.class);
private Boolean traceEnabled = null;
private RegionInfo daughter_1_RI;
private RegionInfo daughter_2_RI;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
index 66277bec11..8536e77ef1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.ServerName;
@@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperat
import org.apache.hadoop.hbase.regionserver.RegionServerAbortedException;
import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.UnassignRegionStateData;
@@ -72,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
*/
@InterfaceAudience.Private
public class UnassignProcedure extends RegionTransitionProcedure {
- private static final Log LOG = LogFactory.getLog(UnassignProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(UnassignProcedure.class);
/**
* Where to send the unassign RPC.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java
index b964f219f0..712567cb3d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.master.balancer;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.HMaster;
/**
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.master.HMaster;
*/
@InterfaceAudience.Private
public class BalancerChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(BalancerChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BalancerChore.class);
private final HMaster master;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
index 8dd34c28ac..5bacebb671 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
@@ -35,8 +35,6 @@ import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.commons.lang3.NotImplementedException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -53,6 +51,8 @@ import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.RackManager;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
@@ -1004,7 +1004,7 @@ public abstract class BaseLoadBalancer implements LoadBalancer {
protected Configuration config = HBaseConfiguration.create();
protected RackManager rackManager;
private static final Random RANDOM = new Random(System.currentTimeMillis());
- private static final Log LOG = LogFactory.getLog(BaseLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BaseLoadBalancer.class);
protected MetricsBalancer metricsBalancer = null;
protected ClusterStatus clusterStatus = null;
protected ServerName masterServerName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java
index 31b1e09291..427322dfc4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.master.balancer;
import java.io.InterruptedIOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.LoadBalancer;
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.master.LoadBalancer;
*/
@InterfaceAudience.Private
public class ClusterStatusChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(ClusterStatusChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClusterStatusChore.class);
private final HMaster master;
private final LoadBalancer balancer;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
index a2fe9a25f2..b3e72faa1c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
@@ -32,8 +32,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
@@ -47,7 +45,8 @@ import org.apache.hadoop.hbase.master.LoadBalancer;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.util.Pair;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@@ -70,7 +69,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
public class FavoredStochasticBalancer extends StochasticLoadBalancer implements
FavoredNodesPromoter {
- private static final Log LOG = LogFactory.getLog(FavoredStochasticBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FavoredStochasticBalancer.class);
private FavoredNodesManager fnm;
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
index 3046243622..2b48f59a61 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
@@ -28,8 +28,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
@@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
@@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
*/
@InterfaceAudience.Private
class RegionLocationFinder {
- private static final Log LOG = LogFactory.getLog(RegionLocationFinder.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionLocationFinder.class);
private static final long CACHE_TIME = 240 * 60 * 1000;
private static final HDFSBlocksDistribution EMPTY_BLOCK_DISTRIBUTION = new HDFSBlocksDistribution();
private Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java
index e356942cbe..adfc577877 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java
@@ -28,8 +28,6 @@ import java.util.NavigableMap;
import java.util.Random;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -39,7 +37,8 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue;
/**
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQu
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class SimpleLoadBalancer extends BaseLoadBalancer {
- private static final Log LOG = LogFactory.getLog(SimpleLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleLoadBalancer.class);
private static final Random RANDOM = new Random(System.currentTimeMillis());
private RegionInfoComparator riComparator = new RegionInfoComparator();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java
index 23d9cb47c6..7123155925 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java
@@ -30,8 +30,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -52,7 +50,8 @@ import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.SwapRegi
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Optional;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -121,7 +120,7 @@ public class StochasticLoadBalancer extends BaseLoadBalancer {
"hbase.master.balancer.stochastic.minCostNeedBalance";
protected static final Random RANDOM = new Random(System.currentTimeMillis());
- private static final Log LOG = LogFactory.getLog(StochasticLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StochasticLoadBalancer.class);
Map<String, Deque<BalancerRegionLoad>> loads = new HashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 582df8436a..bc7c82d1db 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -35,8 +35,6 @@ import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -45,6 +43,8 @@ import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.ipc.RemoteException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Abstract Cleaner that uses a chain of delegates to clean a directory of files
@@ -53,7 +53,7 @@ import org.apache.hadoop.ipc.RemoteException;
public abstract class CleanerChore extends ScheduledChore
implements ConfigurationObserver {
- private static final Log LOG = LogFactory.getLog(CleanerChore.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(CleanerChore.class.getName());
private static final int AVAIL_PROCESSORS = Runtime.getRuntime().availableProcessors();
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
index 5c78dc498e..8c02f3ed6e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
@@ -25,8 +25,6 @@ import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.StealJobQueue;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
* This Chore, every time it runs, will clear the HFiles in the hfile archive
@@ -77,7 +76,7 @@ public class HFileCleaner extends CleanerChore {
"hbase.regionserver.hfilecleaner.small.thread.count";
public final static int DEFAULT_SMALL_HFILE_DELETE_THREAD_NUMBER = 1;
- private static final Log LOG = LogFactory.getLog(HFileCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileCleaner.class);
StealJobQueue largeFileQueue;
BlockingQueue smallFileQueue;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java
index 8129e34b9c..db5230c853 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java
@@ -18,8 +18,7 @@
package org.apache.hadoop.hbase.master.cleaner;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -27,6 +26,8 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.util.FSUtils;
@@ -41,7 +42,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class HFileLinkCleaner extends BaseHFileCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(HFileLinkCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileLinkCleaner.class);
private FileSystem fs = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
index 44aafe2038..5d5dddbc17 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
@@ -25,8 +25,6 @@ import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -35,7 +33,8 @@ import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class LogCleaner extends CleanerChore {
- private static final Log LOG = LogFactory.getLog(LogCleaner.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(LogCleaner.class.getName());
public static final String OLD_WALS_CLEANER_SIZE = "hbase.oldwals.cleaner.thread.size";
public static final int OLD_WALS_CLEANER_DEFAULT_SIZE = 2;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java
index 23e5a666b2..43a99bdb51 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java
@@ -25,14 +25,14 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Delete;
@@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public class ReplicationMetaCleaner extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(ReplicationMetaCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationMetaCleaner.class);
private final Admin admin;
private final MasterServices master;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java
index 3f7bd74473..97deab51ed 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java
@@ -26,8 +26,6 @@ import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -41,13 +39,15 @@ import org.apache.hadoop.hbase.replication.ReplicationStateZKBase;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Used to clean the replication queues belonging to the peer which does not exist.
*/
@InterfaceAudience.Private
public class ReplicationZKNodeCleaner {
- private static final Log LOG = LogFactory.getLog(ReplicationZKNodeCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationZKNodeCleaner.class);
private final ZKWatcher zkw;
private final ReplicationQueuesClient queuesClient;
private final ReplicationPeers replicationPeers;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java
index 6be13849fa..8d5df9bfd2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java
@@ -22,18 +22,18 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Chore that will clean the replication queues belonging to the peer which does not exist.
*/
@InterfaceAudience.Private
public class ReplicationZKNodeCleanerChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(ReplicationZKNodeCleanerChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationZKNodeCleanerChore.class);
private final ReplicationZKNodeCleaner cleaner;
public ReplicationZKNodeCleanerChore(Stoppable stopper, int period,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java
index f9ebdf3bb2..e789752d1a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.master.cleaner;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class TimeToLiveHFileCleaner extends BaseHFileCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(TimeToLiveHFileCleaner.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(TimeToLiveHFileCleaner.class.getName());
public static final String TTL_CONF_KEY = "hbase.master.hfilecleaner.ttl";
// default ttl = 5 minutes
public static final long DEFAULT_TTL = 60000 * 5;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java
index c2b872f1ef..7385273e71 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java
@@ -17,10 +17,10 @@
*/
package org.apache.hadoop.hbase.master.cleaner;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class TimeToLiveLogCleaner extends BaseLogCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(TimeToLiveLogCleaner.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(TimeToLiveLogCleaner.class.getName());
public static final String TTL_CONF_KEY = "hbase.master.logcleaner.ttl";
// default ttl = 10 minutes
public static final long DEFAULT_TTL = 600_000L;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java
index cd9a7ec3d2..467accdac0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java
@@ -17,14 +17,14 @@
*/
package org.apache.hadoop.hbase.master.cleaner;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Procedure WAL cleaner that uses the timestamp of the Procedure WAL to determine if it should be
@@ -32,8 +32,8 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class TimeToLiveProcedureWALCleaner extends BaseFileCleanerDelegate {
-
- private static final Log LOG = LogFactory.getLog(TimeToLiveProcedureWALCleaner.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TimeToLiveProcedureWALCleaner.class.getName());
public static final String TTL_CONF_KEY = "hbase.master.procedurewalcleaner.ttl";
// default ttl = 7 days
public static final long DEFAULT_TTL = 604_800_000L;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java
index 883d6596ca..36269f378e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java
@@ -22,15 +22,14 @@ import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.procedure2.LockType;
import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -38,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public final class LockManager {
- private static final Log LOG = LogFactory.getLog(LockManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LockManager.class);
private final HMaster master;
private final RemoteLocks remoteLocks;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
index 61843d81fa..edf7642574 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
@@ -24,8 +24,6 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData;
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
@InterfaceAudience.Private
public final class LockProcedure extends Procedure
implements TableProcedureInterface {
- private static final Log LOG = LogFactory.getLog(LockProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LockProcedure.class);
public static final int DEFAULT_REMOTE_LOCKS_TIMEOUT_MS = 30000; // timeout in ms
public static final String REMOTE_LOCKS_TIMEOUT_MS_CONF =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java
index b6602b190b..7c33661d7c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java
@@ -20,18 +20,18 @@ package org.apache.hadoop.hbase.master.normalizer;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Normalization plan to merge regions (smallest region in the table with its smallest neighbor).
*/
@InterfaceAudience.Private
public class MergeNormalizationPlan implements NormalizationPlan {
- private static final Log LOG = LogFactory.getLog(MergeNormalizationPlan.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(MergeNormalizationPlan.class.getName());
private final RegionInfo firstRegion;
private final RegionInfo secondRegion;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java
index 7137bc9d9d..19d2dc7a3b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java
@@ -18,10 +18,10 @@
*/
package org.apache.hadoop.hbase.master.normalizer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.HMaster;
import java.io.IOException;
@@ -32,7 +32,7 @@ import java.io.IOException;
*/
@InterfaceAudience.Private
public class RegionNormalizerChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(RegionNormalizerChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionNormalizerChore.class);
private final HMaster master;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java
index 8190f271ce..767324aa49 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java
@@ -23,8 +23,6 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.RegionLoad;
import org.apache.hadoop.hbase.ServerName;
@@ -35,7 +33,8 @@ import org.apache.hadoop.hbase.master.MasterRpcServices;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
/**
@@ -60,7 +59,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
@InterfaceAudience.Private
public class SimpleRegionNormalizer implements RegionNormalizer {
- private static final Log LOG = LogFactory.getLog(SimpleRegionNormalizer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleRegionNormalizer.class);
private static final int MIN_REGION_COUNT = 3;
private MasterServices masterServices;
private MasterRpcServices masterRpcServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java
index 9217143edd..b5f8e823ec 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java
@@ -21,18 +21,18 @@ package org.apache.hadoop.hbase.master.normalizer;
import java.io.IOException;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Normalization plan to split region.
*/
@InterfaceAudience.Private
public class SplitNormalizationPlan implements NormalizationPlan {
- private static final Log LOG = LogFactory.getLog(SplitNormalizationPlan.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(SplitNormalizationPlan.class.getName());
private RegionInfo regionInfo;
private byte[] splitPoint;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
index a524879bf0..a17108fdca 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
@@ -25,8 +25,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -54,7 +52,8 @@ import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
@@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
public class CloneSnapshotProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(CloneSnapshotProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CloneSnapshotProcedure.class);
private TableDescriptor tableDescriptor;
private SnapshotDescription snapshot;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
index fa743bdbdd..f0d0af8c53 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
@@ -19,11 +19,12 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceExistException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.master.TableNamespaceManager;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
@@ -38,7 +39,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
@InterfaceAudience.Private
public class CreateNamespaceProcedure
extends AbstractStateMachineNamespaceProcedure {
- private static final Log LOG = LogFactory.getLog(CreateNamespaceProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CreateNamespaceProcedure.class);
private NamespaceDescriptor nsDescriptor;
private Boolean traceEnabled;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
index ed137c2598..63d6d2f7e4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.ModifyRegionUtils;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
@@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.C
@InterfaceAudience.Private
public class CreateTableProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(CreateTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CreateTableProcedure.class);
private TableDescriptor tableDescriptor;
private List newRegions;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
index 9646946e89..1c587eb55d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
@@ -20,8 +20,7 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.FileNotFoundException;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -29,6 +28,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.master.TableNamespaceManager;
@@ -44,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
@InterfaceAudience.Private
public class DeleteNamespaceProcedure
extends AbstractStateMachineNamespaceProcedure {
- private static final Log LOG = LogFactory.getLog(DeleteNamespaceProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DeleteNamespaceProcedure.class);
private NamespaceDescriptor nsDescriptor;
private String namespaceName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
index 4cc18755ec..151e3d65d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -49,7 +47,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -58,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.D
@InterfaceAudience.Private
public class DeleteTableProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(DeleteTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DeleteTableProcedure.class);
private List regions;
private TableName tableName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
index 045ee9e7e8..e748c6ce7f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
@@ -19,13 +19,14 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
@@ -38,7 +39,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.D
@InterfaceAudience.Private
public class DisableTableProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(DisableTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DisableTableProcedure.class);
private TableName tableName;
private boolean skipTableStateCheck;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
index cf40696960..c501e5396a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.E
@InterfaceAudience.Private
public class EnableTableProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(EnableTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EnableTableProcedure.class);
private TableName tableName;
private boolean skipTableStateCheck;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java
index 02ecdc6b3d..6d06de2c51 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -31,13 +29,15 @@ import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Helper class for schema change procedures
*/
@InterfaceAudience.Private
public final class MasterDDLOperationHelper {
- private static final Log LOG = LogFactory.getLog(MasterDDLOperationHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterDDLOperationHelper.class);
private MasterDDLOperationHelper() {}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
index c9c3ac9820..0a4c97db59 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
@@ -20,14 +20,14 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.conf.ConfigurationObserver;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class MasterProcedureEnv implements ConfigurationObserver {
- private static final Log LOG = LogFactory.getLog(MasterProcedureEnv.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterProcedureEnv.class);
@InterfaceAudience.Private
public static class WALStoreLeaseRecovery implements WALProcedureStore.LeaseRecovery {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
index 9402845354..c60de5c9df 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
@@ -26,8 +26,6 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableExistsException;
@@ -50,7 +48,8 @@ import org.apache.hadoop.hbase.util.AvlUtil.AvlLinkedNode;
import org.apache.hadoop.hbase.util.AvlUtil.AvlTree;
import org.apache.hadoop.hbase.util.AvlUtil.AvlTreeIterator;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -104,7 +103,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class MasterProcedureScheduler extends AbstractProcedureScheduler {
- private static final Log LOG = LogFactory.getLog(MasterProcedureScheduler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterProcedureScheduler.class);
private final static ServerQueueKeyComparator SERVER_QUEUE_KEY_COMPARATOR =
new ServerQueueKeyComparator();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java
index 7826f96f79..b87f437989 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java
@@ -21,10 +21,10 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
@@ -36,7 +36,7 @@ import org.apache.hadoop.security.UserGroupInformation;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class MasterProcedureUtil {
- private static final Log LOG = LogFactory.getLog(MasterProcedureUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterProcedureUtil.class);
private MasterProcedureUtil() {}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
index 697a2ea668..2a7dc5b28b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
@@ -19,11 +19,12 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.TableNamespaceManager;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -36,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M
@InterfaceAudience.Private
public class ModifyNamespaceProcedure
extends AbstractStateMachineNamespaceProcedure {
- private static final Log LOG = LogFactory.getLog(ModifyNamespaceProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ModifyNamespaceProcedure.class);
private NamespaceDescriptor oldNsDescriptor;
private NamespaceDescriptor newNsDescriptor;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
index bda8b81e98..f0be1e0819 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
@@ -23,8 +23,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableState;
@@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M
@InterfaceAudience.Private
public class ModifyTableProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(ModifyTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ModifyTableProcedure.class);
private TableDescriptor unmodifiedTableDescriptor = null;
private TableDescriptor modifiedTableDescriptor;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
index 23b83acd64..ae37a48d26 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
@@ -26,8 +26,6 @@ import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoordinatedStateException;
import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.quotas.MasterQuotaManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Helper to synchronously wait on conditions.
@@ -50,7 +50,7 @@ import org.apache.yetus.audience.InterfaceStability;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class ProcedureSyncWait {
- private static final Log LOG = LogFactory.getLog(ProcedureSyncWait.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureSyncWait.class);
private ProcedureSyncWait() {}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
index 045c416ff5..72e0846cef 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
@@ -25,8 +25,6 @@ import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.master.ServerListener;
import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.ipc.RemoteException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionR
public class RSProcedureDispatcher
extends RemoteProcedureDispatcher
implements ServerListener {
- private static final Log LOG = LogFactory.getLog(RSProcedureDispatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RSProcedureDispatcher.class);
public static final String RS_RPC_STARTUP_WAIT_TIME_CONF_KEY =
"hbase.regionserver.rpc.startup.waittime";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
index f1174d42ea..90dfff043a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RecoverMetaState;
@@ -51,7 +50,7 @@ import com.google.common.base.Preconditions;
public class RecoverMetaProcedure
extends StateMachineProcedure
implements TableProcedureInterface {
- private static final Log LOG = LogFactory.getLog(RecoverMetaProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RecoverMetaProcedure.class);
private ServerName failedMetaServer;
private boolean shouldSplitWal;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
index 2cf558437a..9aa5171786 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
@@ -25,8 +25,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -49,7 +47,8 @@ import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
public class RestoreSnapshotProcedure
extends AbstractStateMachineTableProcedure {
- private static final Log LOG = LogFactory.getLog(RestoreSnapshotProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RestoreSnapshotProcedure.class);
private TableDescriptor modifiedTableDescriptor;
private List regionsToRestore = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
index 0e37c1174b..e9be05800d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.S
public class ServerCrashProcedure
extends StateMachineProcedure<MasterProcedureEnv, ServerCrashState>
implements ServerProcedureInterface {
- private static final Log LOG = LogFactory.getLog(ServerCrashProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ServerCrashProcedure.class);
/**
* Name of the crashed server to process.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
index dce3b41419..541fb8e1ae 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
@@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.util.ModifyRegionUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.T
@InterfaceAudience.Private
public class TruncateTableProcedure
extends AbstractStateMachineTableProcedure<TruncateTableState> {
- private static final Log LOG = LogFactory.getLog(TruncateTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TruncateTableProcedure.class);
private boolean preserveSplits;
private List regions;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
index 84c154f8bf..ee5afd7d1e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
@@ -23,8 +23,6 @@ import java.util.List;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
@@ -39,7 +37,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
/**
@@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class DisabledTableSnapshotHandler extends TakeSnapshotHandler {
- private static final Log LOG = LogFactory.getLog(DisabledTableSnapshotHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DisabledTableSnapshotHandler.class);
/**
* @param snapshot descriptor of the snapshot to take
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
index 399a1274e4..db2b9c888e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
@@ -22,8 +22,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
@@ -34,7 +32,8 @@ import org.apache.hadoop.hbase.procedure.ProcedureCoordinator;
import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
@@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
public class EnabledTableSnapshotHandler extends TakeSnapshotHandler {
- private static final Log LOG = LogFactory.getLog(EnabledTableSnapshotHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EnabledTableSnapshotHandler.class);
private final ProcedureCoordinator coordinator;
public EnabledTableSnapshotHandler(SnapshotDescription snapshot, MasterServices master,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
index b698082f74..1e1a3c421d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
@@ -22,8 +22,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -77,7 +76,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
@InterfaceStability.Unstable
public final class MasterSnapshotVerifier {
- private static final Log LOG = LogFactory.getLog(MasterSnapshotVerifier.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterSnapshotVerifier.class);
private SnapshotDescription snapshot;
private FileSystem fs;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java
index 56d1319c94..560e7f568c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java
@@ -29,12 +29,6 @@ import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -43,6 +37,13 @@ import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
* Intelligently keep track of all the files for all the snapshots.
@@ -85,7 +86,7 @@ public class SnapshotFileCache implements Stoppable {
Collection<String> filesUnderSnapshot(final Path snapshotDir) throws IOException;
}
- private static final Log LOG = LogFactory.getLog(SnapshotFileCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotFileCache.class);
private volatile boolean stop = false;
private final FileSystem fs;
private final SnapshotFileInspector fileInspector;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
index f3ca993576..a8475f0f6e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
@@ -22,10 +22,10 @@ import java.util.Collection;
import java.util.Collections;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
@InterfaceStability.Evolving
public class SnapshotHFileCleaner extends BaseHFileCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(SnapshotHFileCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotHFileCleaner.class);
/**
* Conf key for the frequency to attempt to refresh the cache of hfiles currently used in
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
index 20a4f39935..3870601db4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
@@ -29,8 +29,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -83,7 +81,8 @@ import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
@@ -102,7 +101,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
@InterfaceStability.Unstable
public class SnapshotManager extends MasterProcedureManager implements Stoppable {
- private static final Log LOG = LogFactory.getLog(SnapshotManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotManager.class);
/** By default, check to see if the snapshot is complete every WAKE MILLIS (ms) */
private static final int SNAPSHOT_WAKE_MILLIS_DEFAULT = 500;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
index 808cab5b35..9b077d15e6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
@@ -25,8 +25,6 @@ import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -56,7 +54,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
/**
@@ -69,7 +68,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
@InterfaceAudience.Private
public abstract class TakeSnapshotHandler extends EventHandler implements SnapshotSentinel,
ForeignExceptionSnare {
- private static final Log LOG = LogFactory.getLog(TakeSnapshotHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TakeSnapshotHandler.class);
private volatile boolean finished;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
index 447629b550..01c195a4d3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -52,6 +50,8 @@ import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Compact passed set of files in the mob-enabled column family.
@@ -59,7 +59,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class DefaultMobStoreCompactor extends DefaultCompactor {
- private static final Log LOG = LogFactory.getLog(DefaultMobStoreCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultMobStoreCompactor.class);
private long mobSizeThreshold;
private HMobStore mobStore;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
index 27809c4e00..a932dad708 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
@@ -24,8 +24,6 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -46,6 +44,8 @@ import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An implementation of the StoreFlusher. It extends the DefaultStoreFlusher.
@@ -65,7 +65,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class DefaultMobStoreFlusher extends DefaultStoreFlusher {
- private static final Log LOG = LogFactory.getLog(DefaultMobStoreFlusher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultMobStoreFlusher.class);
private final Object flushLock = new Object();
private long mobCellValueSizeThreshold = 0;
private Path targetPath;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
index 3924ee6483..053cba641f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mob;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -29,6 +27,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Connection;
@@ -47,7 +47,7 @@ import org.apache.hadoop.util.ToolRunner;
@InterfaceAudience.Private
public class ExpiredMobFileCleaner extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(ExpiredMobFileCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExpiredMobFileCleaner.class);
/**
* Cleans the MOB files when they're expired and their min versions are 0.
* If the latest timestamp of Cells in a MOB file is older than the TTL in the column family,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java
index 813de8ce11..13caee600f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java
@@ -31,12 +31,12 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.IdLock;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
@InterfaceAudience.Private
public class MobFileCache {
- private static final Log LOG = LogFactory.getLog(MobFileCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MobFileCache.class);
/*
* Eviction and statistics thread. Periodically run to print the statistics and
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
index b68e4ffc77..9fa4e4cf83 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
@@ -36,8 +36,6 @@ import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -79,6 +77,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The mob utilities
@@ -86,7 +86,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public final class MobUtils {
- private static final Log LOG = LogFactory.getLog(MobUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MobUtils.class);
private final static long WEEKLY_THRESHOLD_MULTIPLIER = 7;
private final static long MONTHLY_THRESHOLD_MULTIPLIER = 4 * WEEKLY_THRESHOLD_MULTIPLIER;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
index cf661db511..9c23470594 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
@@ -35,13 +35,12 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
+import java.util.Objects;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -49,7 +48,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
@@ -90,6 +88,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An implementation of {@link MobCompactor} that compacts the mob files in partitions.
@@ -97,7 +97,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class PartitionedMobCompactor extends MobCompactor {
- private static final Log LOG = LogFactory.getLog(PartitionedMobCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(PartitionedMobCompactor.class);
protected long mergeableSize;
protected int delFileMaxCount;
/** The number of files compacted in a batch */
@@ -362,7 +362,7 @@ public class PartitionedMobCompactor extends MobCompactor {
LOG.info(
"After a mob compaction with all files selected, archiving the del files ");
for (CompactionDelPartition delPartition : request.getDelPartitions()) {
- LOG.info(delPartition.listDelFiles());
+ LOG.info(Objects.toString(delPartition.listDelFiles()));
try {
MobUtils.removeMobFiles(conf, fs, tableName, mobTableDir, column.getName(),
delPartition.getStoreFiles());
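Note on the hunk above: this is the one change in the section that is not a pure import/declaration swap. Commons-logging's Log.info(Object) accepted any Object, while SLF4J's Logger.info(...) overloads take a String message, so the List returned by listDelFiles() has to be converted, here with java.util.Objects.toString(). A minimal standalone sketch of the two equivalent options under SLF4J follows; the class and variable names are illustrative only and do not come from the patch.

    import java.util.Arrays;
    import java.util.List;
    import java.util.Objects;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Slf4jObjectLoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(Slf4jObjectLoggingSketch.class);

      public static void main(String[] args) {
        // Stand-in for a non-String value such as delPartition.listDelFiles().
        List<String> delFiles = Arrays.asList("delfile-1", "delfile-2");

        // Option used in the patch: convert the value to a String explicitly.
        LOG.info(Objects.toString(delFiles));

        // SLF4J alternative: parameterized logging defers toString() until the
        // message is actually rendered.
        LOG.info("{}", delFiles);
      }
    }

Either form compiles against slf4j-api; the patch uses Objects.toString() so the statement stays a drop-in replacement for the old commons-logging call.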
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
index 837ddf007d..61d0007561 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
@@ -28,11 +28,11 @@ import java.util.Iterator;
import java.util.List;
import org.apache.commons.collections4.queue.CircularFifoQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
@InterfaceAudience.Private
public class TaskMonitor {
- private static final Log LOG = LogFactory.getLog(TaskMonitor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TaskMonitor.class);
public static final String MAX_TASKS_KEY = "hbase.taskmonitor.max.tasks";
public static final int DEFAULT_MAX_TASKS = 1000;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java
index efe2c1e957..0a74b093a8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.namespace;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.quotas.QuotaExceededException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The Class NamespaceAuditor performs checks to ensure operations like table creation
@@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class NamespaceAuditor {
- private static final Log LOG = LogFactory.getLog(NamespaceAuditor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NamespaceAuditor.class);
private NamespaceStateManager stateManager;
private MasterServices masterServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java
index c62594adc8..3cf5a25723 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java
@@ -22,8 +22,6 @@ import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
@@ -33,6 +31,8 @@ import org.apache.hadoop.hbase.master.TableNamespaceManager;
import org.apache.hadoop.hbase.quotas.QuotaExceededException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* NamespaceStateManager manages state (in terms of quota) of all the namespaces. It contains
@@ -41,7 +41,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
class NamespaceStateManager {
- private static final Log LOG = LogFactory.getLog(NamespaceStateManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NamespaceStateManager.class);
private ConcurrentMap<String, NamespaceTableAndRegionInfo> nsStateCache;
private MasterServices master;
private volatile boolean initialized = false;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java
index 1e12304b76..15e882b81d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java
@@ -25,9 +25,9 @@ import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener;
@@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
@InterfaceAudience.Private
public class Procedure implements Callable<Void>, ForeignExceptionListener {
- private static final Log LOG = LogFactory.getLog(Procedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Procedure.class);
//
// Arguments and naming
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
index 2f67d41bee..c9f068b632 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
@@ -29,9 +29,9 @@ import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.DaemonThreadFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
*/
@InterfaceAudience.Private
public class ProcedureCoordinator {
- private static final Log LOG = LogFactory.getLog(ProcedureCoordinator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureCoordinator.class);
final static long KEEP_ALIVE_MILLIS_DEFAULT = 5000;
final static long TIMEOUT_MILLIS_DEFAULT = 60000;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
index 09a2972030..af4d2d7104 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
@@ -23,11 +23,11 @@ import java.util.HashSet;
import java.util.Set;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides the common setup framework and runtime services for globally
@@ -44,7 +44,7 @@ public abstract class ProcedureManagerHost {
public static final String MASTER_PROCEDURE_CONF_KEY =
"hbase.procedure.master.classes";
- private static final Log LOG = LogFactory.getLog(ProcedureManagerHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureManagerHost.class);
protected Set<E> procedures = new HashSet<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java
index c8399ba4bd..86923ae16e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java
@@ -26,9 +26,9 @@ import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.DaemonThreadFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
*/
@InterfaceAudience.Private
public class ProcedureMember implements Closeable {
- private static final Log LOG = LogFactory.getLog(ProcedureMember.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProcedureMember.class);
final static long KEEP_ALIVE_MILLIS_DEFAULT = 5000;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java
index 0f4ea64586..5cb2529f51 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.hbase.procedure;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.procedure.flush.RegionServerFlushTableProcedureManager;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides the globally barriered procedure framework and environment
@@ -36,8 +36,8 @@ import org.apache.zookeeper.KeeperException;
public class RegionServerProcedureManagerHost extends
ProcedureManagerHost<RegionServerProcedureManager> {
- private static final Log LOG = LogFactory
- .getLog(RegionServerProcedureManagerHost.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(RegionServerProcedureManagerHost.class);
public void initialize(RegionServerServices rss) throws KeeperException {
for (RegionServerProcedureManager proc : procedures) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
index 892733828e..6416e6a65b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
@@ -21,14 +21,14 @@ import java.io.IOException;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
import org.apache.hadoop.hbase.errorhandling.TimeoutExceptionInjector;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Distributed procedure member's Subprocedure. A procedure is started on a ProcedureCoordinator
@@ -51,7 +51,7 @@ import org.apache.zookeeper.KeeperException;
* barrierName. (ex: snapshot121126).
*/
abstract public class Subprocedure implements Callable<Void> {
- private static final Log LOG = LogFactory.getLog(Subprocedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Subprocedure.class);
// Name of the procedure
final private String barrierName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java
index 609ce8ee3b..c1fb8f5c1a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java
@@ -22,8 +22,6 @@ import java.io.InterruptedIOException;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
@@ -31,13 +29,15 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* ZooKeeper based {@link ProcedureCoordinatorRpcs} for a {@link ProcedureCoordinator}
*/
@InterfaceAudience.Private
public class ZKProcedureCoordinator implements ProcedureCoordinatorRpcs {
- private static final Log LOG = LogFactory.getLog(ZKProcedureCoordinator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKProcedureCoordinator.class);
private ZKProcedureUtil zkProc = null;
protected ProcedureCoordinator coordinator = null; // if started this should be non-null
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
index 45e6760967..ea41ae8972 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
@@ -21,8 +21,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
@@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* ZooKeeper based controller for a procedure member.
@@ -52,7 +52,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class ZKProcedureMemberRpcs implements ProcedureMemberRpcs {
- private static final Log LOG = LogFactory.getLog(ZKProcedureMemberRpcs.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKProcedureMemberRpcs.class);
private final ZKProcedureUtil zkController;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
index 0349290eab..976e36b49b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
@@ -21,14 +21,14 @@ import java.io.Closeable;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This is a shared ZooKeeper-based znode management utils for distributed procedure. All znode
@@ -52,7 +52,7 @@ import org.apache.zookeeper.KeeperException;
public abstract class ZKProcedureUtil
extends ZKListener implements Closeable {
- private static final Log LOG = LogFactory.getLog(ZKProcedureUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKProcedureUtil.class);
public static final String ACQUIRED_BARRIER_ZNODE_DEFAULT = "acquired";
public static final String REACHED_BARRIER_ZNODE_DEFAULT = "reached";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java
index 1b4c561c23..5c005a75a9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.procedure.flush;
import java.util.List;
import java.util.concurrent.Callable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.procedure.ProcedureMember;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
*/
@InterfaceAudience.Private
public class FlushTableSubprocedure extends Subprocedure {
- private static final Log LOG = LogFactory.getLog(FlushTableSubprocedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushTableSubprocedure.class);
private final String table;
private final List regions;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
index 66f9240f7b..55d73d87e7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
@@ -25,8 +25,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -47,7 +45,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
@@ -65,7 +64,7 @@ public class MasterFlushTableProcedureManager extends MasterProcedureManager {
"hbase.flush.procedure.master.threads";
private static final int FLUSH_PROC_POOL_THREADS_DEFAULT = 1;
- private static final Log LOG = LogFactory.getLog(MasterFlushTableProcedureManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterFlushTableProcedureManager.class);
private MasterServices master;
private ProcedureCoordinator coordinator;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java
index d328561ce2..bf55c0c885 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java
@@ -29,8 +29,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
@@ -51,13 +49,16 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This manager class handles flushing of the regions for table on a {@link HRegionServer}.
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class RegionServerFlushTableProcedureManager extends RegionServerProcedureManager {
- private static final Log LOG = LogFactory.getLog(RegionServerFlushTableProcedureManager.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(RegionServerFlushTableProcedureManager.class);
private static final String CONCURENT_FLUSH_TASKS_KEY =
"hbase.flush.procedure.region.concurrentTasks";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
index 38fc488e48..a15aeb6852 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
@@ -21,17 +21,17 @@ package org.apache.hadoop.hbase.quotas;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Result;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class DefaultOperationQuota implements OperationQuota {
- private static final Log LOG = LogFactory.getLog(DefaultOperationQuota.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultOperationQuota.class);
private final List<QuotaLimiter> limiters;
private long writeAvailable = 0;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java
index a76e9c1937..eded076472 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java
@@ -23,8 +23,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -34,13 +32,15 @@ import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A chore which computes the size of each {@link HRegion} on the FileSystem hosted by the given {@link HRegionServer}.
*/
@InterfaceAudience.Private
public class FileSystemUtilizationChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(FileSystemUtilizationChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FileSystemUtilizationChore.class);
static final String FS_UTILIZATION_CHORE_PERIOD_KEY = "hbase.regionserver.quotas.fs.utilization.chore.period";
static final int FS_UTILIZATION_CHORE_PERIOD_DEFAULT = 1000 * 60 * 5; // 5 minutes in millis
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
index e4fa3eaa1d..79be1ac6bc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
@@ -28,8 +28,6 @@ import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.lang3.builder.HashCodeBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.namespace.NamespaceAuditor;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRe
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class MasterQuotaManager implements RegionStateListener {
- private static final Log LOG = LogFactory.getLog(MasterQuotaManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterQuotaManager.class);
private static final Map EMPTY_MAP = Collections.unmodifiableMap(
new HashMap<>());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java
index 6a5e38c5e5..adabdac6a8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java
@@ -30,14 +30,14 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -57,7 +57,7 @@ import org.apache.hadoop.security.UserGroupInformation;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class QuotaCache implements Stoppable {
- private static final Log LOG = LogFactory.getLog(QuotaCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(QuotaCache.class);
public static final String REFRESH_CONF_KEY = "hbase.quota.refresh.period";
private static final int REFRESH_DEFAULT_PERIOD = 5 * 60000; // 5min
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java
index bfbda35cbc..39048d79ee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java
@@ -26,8 +26,6 @@ import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
@@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.master.MetricsMaster;
import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
@@ -53,7 +52,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota;
*/
@InterfaceAudience.Private
public class QuotaObserverChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(QuotaObserverChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(QuotaObserverChore.class);
static final String QUOTA_OBSERVER_CHORE_PERIOD_KEY =
"hbase.master.quotas.observer.chore.period";
static final int QUOTA_OBSERVER_CHORE_PERIOD_DEFAULT = 1000 * 60 * 1; // 1 minutes in millis
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
index f3a745ca3f..6bc3ce9d47 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
@@ -23,8 +23,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
@@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class QuotaUtil extends QuotaTableUtil {
- private static final Log LOG = LogFactory.getLog(QuotaUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(QuotaUtil.class);
public static final String QUOTA_CONF_KEY = "hbase.quota.enabled";
private static final boolean QUOTA_ENABLED_DEFAULT = false;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java
index 51b8cc950b..62e06146b3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java
@@ -22,10 +22,10 @@ import java.io.IOException;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ipc.RpcScheduler;
import org.apache.hadoop.hbase.ipc.RpcServer;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class RegionServerRpcQuotaManager {
- private static final Log LOG = LogFactory.getLog(RegionServerRpcQuotaManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionServerRpcQuotaManager.class);
private final RegionServerServices rsServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java
index 80bbdc3626..0a998dc469 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java
@@ -24,10 +24,10 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class RegionServerSpaceQuotaManager {
- private static final Log LOG = LogFactory.getLog(RegionServerSpaceQuotaManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionServerSpaceQuotaManager.class);
private final RegionServerServices rsServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java
index 4f2efc7e37..78bbf755e8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java
@@ -32,8 +32,6 @@ import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.commons.lang3.builder.HashCodeBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;
@@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
*/
@InterfaceAudience.Private
public class SnapshotQuotaObserverChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(SnapshotQuotaObserverChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotQuotaObserverChore.class);
static final String SNAPSHOT_QUOTA_CHORE_PERIOD_KEY =
"hbase.master.quotas.snapshot.chore.period";
static final int SNAPSHOT_QUOTA_CHORE_PERIOD_DEFAULT = 1000 * 60 * 5; // 5 minutes in millis
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java
index bdacd334a8..526f2e8602 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java
@@ -17,9 +17,9 @@
package org.apache.hadoop.hbase.quotas;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* An Exception that is thrown when a space quota is in violation.
@@ -27,7 +27,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Public
public class SpaceLimitingException extends QuotaExceededException {
private static final long serialVersionUID = 2319438922387583600L;
- private static final Log LOG = LogFactory.getLog(SpaceLimitingException.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SpaceLimitingException.class);
private static final String MESSAGE_PREFIX = SpaceLimitingException.class.getName() + ": ";
private final String policyName;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java
index e86e9ceecb..d3be620877 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java
@@ -22,12 +22,12 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@InterfaceAudience.Private
public class SpaceQuotaRefresherChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(SpaceQuotaRefresherChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SpaceQuotaRefresherChore.class);
static final String POLICY_REFRESHER_CHORE_PERIOD_KEY =
"hbase.regionserver.quotas.policy.refresher.chore.period";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java
index dfaabec026..664e26848a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java
@@ -24,8 +24,6 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.TableName;
@@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
@@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota;
*/
@InterfaceAudience.Private
public class TableQuotaSnapshotStore implements QuotaSnapshotStore {
- private static final Log LOG = LogFactory.getLog(TableQuotaSnapshotStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableQuotaSnapshotStore.class);
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
private final ReadLock rlock = lock.readLock();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java
index f9813e55f4..d81d7d304a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java
@@ -18,18 +18,18 @@ package org.apache.hadoop.hbase.quotas;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A {@link SpaceQuotaSnapshotNotifier} which uses the hbase:quota table.
*/
public class TableSpaceQuotaSnapshotNotifier implements SpaceQuotaSnapshotNotifier {
- private static final Log LOG = LogFactory.getLog(TableSpaceQuotaSnapshotNotifier.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableSpaceQuotaSnapshotNotifier.class);
private Connection conn;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java
index 806cc763f6..c85ba21db9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java
@@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.quotas.policies;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.quotas.SpaceLimitingException;
import org.apache.hadoop.hbase.quotas.SpaceViolationPolicy;
@@ -34,7 +34,8 @@ import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
*/
@InterfaceAudience.Private
public class DisableTableViolationPolicyEnforcement extends DefaultViolationPolicyEnforcement {
- private static final Log LOG = LogFactory.getLog(DisableTableViolationPolicyEnforcement.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(DisableTableViolationPolicyEnforcement.class);
@Override
public void enable() throws IOException {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java
index 5d5af2f38d..66dfee950b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java
@@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.quotas.policies;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.quotas.SpaceViolationPolicy;
import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
@InterfaceAudience.Private
public class NoWritesCompactionsViolationPolicyEnforcement
extends NoWritesViolationPolicyEnforcement {
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
NoWritesCompactionsViolationPolicyEnforcement.class);
private AtomicBoolean disableCompactions = new AtomicBoolean(false);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
index 4614935cfc..bf150a4c1d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
@@ -24,12 +24,12 @@ import java.util.List;
import java.util.NavigableSet;
import java.util.SortedSet;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
import org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@@ -193,7 +193,7 @@ public abstract class AbstractMemStore implements MemStore {
return conf;
}
- protected void dump(Log log) {
+ protected void dump(Logger log) {
active.dump(log);
snapshot.dump(log);
}
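
Beyond the static fields, the AbstractMemStore change above shows that methods which take the logging handle as a parameter also change their signature from Log to Logger; the same adjustment appears later in this patch for CompositeImmutableSegment.dump and HRegionFileSystem.logFileSystemState. A rough sketch of such a helper after conversion, with illustrative names not drawn from HBase:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class SegmentDumpExample {
      private static final Logger LOG = LoggerFactory.getLogger(SegmentDumpExample.class);

      // Callers now hand in an org.slf4j.Logger instead of a commons-logging Log.
      void dump(Logger log) {
        log.debug("segment contents would be written here");
      }

      void dumpToOwnLogger() {
        dump(LOG);
      }
    }
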
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java
index 76840b70b4..2fdab81313 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java
@@ -22,10 +22,10 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.CellSink;
/**
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.CellSink;
@InterfaceAudience.Private
public abstract class AbstractMultiFileWriter implements CellSink, ShipperListener {
- private static final Log LOG = LogFactory.getLog(AbstractMultiFileWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractMultiFileWriter.class);
/** Factory that is used to produce single StoreFile.Writer-s */
protected WriterFactory writerFactory;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java
index ac0379bf96..4eb3419ad8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java
@@ -21,11 +21,11 @@ import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.PriorityFunction;
import org.apache.hadoop.hbase.ipc.QosPriority;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
@@ -67,8 +67,8 @@ import org.apache.hadoop.hbase.security.User;
//to figure out whether it is a meta region or not.
@InterfaceAudience.Private
public class AnnotationReadingPriorityFunction implements PriorityFunction {
- private static final Log LOG =
- LogFactory.getLog(AnnotationReadingPriorityFunction.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(AnnotationReadingPriorityFunction.class.getName());
/** Used to control the scan delay, currently sqrt(numNextCall * weight) */
public static final String SCAN_VTIME_WEIGHT_CONF_KEY = "hbase.ipc.server.scan.vtime.weight";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java
index 0b251153df..d51d29441e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java
@@ -18,11 +18,11 @@
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
/**
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class BusyRegionSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy {
- private static final Log LOG = LogFactory.getLog(BusyRegionSplitPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BusyRegionSplitPolicy.class);
// Maximum fraction blocked write requests before region is considered for split
private float maxBlockedRequests;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java
index fe510ae457..17e64b0678 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.Cell;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.Comparator;
@@ -28,8 +30,6 @@ import java.util.Map;
import java.util.NavigableSet;
import java.util.NavigableMap;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
/**
@@ -44,7 +44,7 @@ import org.apache.commons.logging.LogFactory;
*/
@InterfaceAudience.Private
public abstract class CellFlatMap implements NavigableMap<Cell, Cell> {
- private static final Log LOG = LogFactory.getLog(CellFlatMap.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CellFlatMap.class);
private final Comparator<? super Cell> comparator;
protected int minCellIdx = 0; // the index of the minimal cell (for sub-sets)
protected int maxCellIdx = 0; // the index of the cell after the maximal cell (for sub-sets)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
index faf517bac5..a0cad8ac8c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
@@ -30,9 +30,9 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.HeapMemoryTuneObserver;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.StringUtils;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
*/
@InterfaceAudience.Private
public class ChunkCreator {
- private static final Log LOG = LogFactory.getLog(ChunkCreator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ChunkCreator.class);
// monotonically increasing chunkid
private AtomicInteger chunkID = new AtomicInteger(1);
// maps the chunk against the monotonically increasing chunk id. We need to preserve the
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
index a8459da395..e143511425 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
@@ -37,8 +37,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.IntSupplier;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.conf.ConfigurationManager;
import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
@@ -55,7 +53,8 @@ import org.apache.hadoop.hbase.util.StealJobQueue;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@@ -64,7 +63,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
*/
@InterfaceAudience.Private
public class CompactSplit implements CompactionRequester, PropagatingConfigurationObserver {
- private static final Log LOG = LogFactory.getLog(CompactSplit.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactSplit.class);
// Configuration key for the large compaction threads.
public final static String LARGE_COMPACTION_THREADS =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java
index 8a0dee67e8..3074dad53d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java
@@ -19,14 +19,13 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.executor.EventType;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -36,7 +35,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class CompactedHFilesDischarger extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(CompactedHFilesDischarger.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactedHFilesDischarger.class);
private RegionServerServices regionServerServices;
// Default is to use executor
@VisibleForTesting
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
index f1232f84f2..7b885ff1af 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
@@ -26,14 +26,14 @@ import java.util.List;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MemoryCompactionPolicy;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -69,7 +69,7 @@ public class CompactingMemStore extends AbstractMemStore {
"hbase.memstore.inmemoryflush.threshold.factor";
private static final double IN_MEMORY_FLUSH_THRESHOLD_FACTOR_DEFAULT = 0.02;
- private static final Log LOG = LogFactory.getLog(CompactingMemStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactingMemStore.class);
private HStore store;
private RegionServicesForStores regionServices;
private CompactionPipeline pipeline;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java
index 2f479e9016..49abe72fa3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java
@@ -23,9 +23,9 @@ import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.util.ClassSize;
*/
@InterfaceAudience.Private
public class CompactionPipeline {
- private static final Log LOG = LogFactory.getLog(CompactionPipeline.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactionPipeline.class);
public final static long FIXED_OVERHEAD = ClassSize
.align(ClassSize.OBJECT + (3 * ClassSize.REFERENCE) + Bytes.SIZEOF_LONG);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
index 93658193c1..5e8a8b3f1b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
@@ -24,12 +24,11 @@ import java.util.LinkedList;
import java.util.List;
import java.util.SortedSet;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -281,7 +280,7 @@ public class CompositeImmutableSegment extends ImmutableSegment {
/**
* Dumps all cells of the segment into the given log
*/
- void dump(Log log) {
+ void dump(Logger log) {
for (ImmutableSegment s : segments) {
s.dump(log);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java
index 13c344150c..2ff7d58b82 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java
@@ -24,9 +24,9 @@ import static org.apache.hadoop.hbase.HConstants.HFILE_BLOCK_CACHE_SIZE_KEY;
import static org.apache.hadoop.hbase.regionserver.HeapMemoryManager.MEMSTORE_SIZE_MAX_RANGE_KEY;
import static org.apache.hadoop.hbase.regionserver.HeapMemoryManager.MEMSTORE_SIZE_MIN_RANGE_KEY;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
@@ -93,7 +93,7 @@ class DefaultHeapMemoryTuner implements HeapMemoryTuner {
// NEUTRAL(given that last tuner period was also NEUTRAL).
private static final double TUNER_STEP_EPS = 1e-6;
- private Log LOG = LogFactory.getLog(DefaultHeapMemoryTuner.class);
+ private Logger LOG = LoggerFactory.getLogger(DefaultHeapMemoryTuner.class);
private TunerResult TUNER_RESULT = new TunerResult(true);
private Configuration conf;
private float sufficientMemoryLevel = DEFAULT_SUFFICIENT_MEMORY_LEVEL_VALUE;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
index 0e0276a1f4..061e4d0733 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
@@ -24,8 +24,6 @@ import java.lang.management.RuntimeMXBean;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
@@ -33,6 +31,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
*/
@InterfaceAudience.Private
public class DefaultMemStore extends AbstractMemStore {
- private static final Log LOG = LogFactory.getLog(DefaultMemStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultMemStore.class);
public final static long DEEP_OVERHEAD = ClassSize.align(AbstractMemStore.DEEP_OVERHEAD);
public final static long FIXED_OVERHEAD = ClassSize.align(AbstractMemStore.FIXED_OVERHEAD);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
index e1f31bbe44..47d22b50d8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
@@ -26,15 +26,14 @@ import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
@@ -44,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
*/
@InterfaceAudience.Private
class DefaultStoreFileManager implements StoreFileManager {
- private static final Log LOG = LogFactory.getLog(DefaultStoreFileManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultStoreFileManager.class);
private final CellComparator cellComparator;
private final CompactionConfiguration comConf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
index d666ba9e95..b3f0a44dc8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
@@ -22,9 +22,9 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
@@ -36,7 +36,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Private
public class DefaultStoreFlusher extends StoreFlusher {
- private static final Log LOG = LogFactory.getLog(DefaultStoreFlusher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultStoreFlusher.class);
private final Object flushLock = new Object();
public DefaultStoreFlusher(Configuration conf, HStore store) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java
index 51790140aa..483c155f83 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -41,8 +41,8 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public class DelimitedKeyPrefixRegionSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy {
- private static final Log LOG = LogFactory
- .getLog(DelimitedKeyPrefixRegionSplitPolicy.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(DelimitedKeyPrefixRegionSplitPolicy.class);
public static final String DELIMITER_KEY = "DelimitedKeyPrefixRegionSplitPolicy.delimiter";
private byte[] delimiter = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java
index e4476d040b..0f0117899f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java
@@ -21,10 +21,10 @@ import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A {@link FlushPolicy} that only flushes store larger than a given threshold. If no store is large
@@ -33,7 +33,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class FlushAllLargeStoresPolicy extends FlushLargeStoresPolicy {
- private static final Log LOG = LogFactory.getLog(FlushAllLargeStoresPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushAllLargeStoresPolicy.class);
@Override
protected void configureForRegion(HRegion region) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java
index 1610fd882f..74bde60397 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java
@@ -17,10 +17,10 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A {@link FlushPolicy} that only flushes store larger than a given threshold. If no store is large
@@ -29,7 +29,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public abstract class FlushLargeStoresPolicy extends FlushPolicy {
- private static final Log LOG = LogFactory.getLog(FlushLargeStoresPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushLargeStoresPolicy.class);
public static final String HREGION_COLUMNFAMILY_FLUSH_SIZE_LOWER_BOUND =
"hbase.hregion.percolumnfamilyflush.size.lower.bound";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java
index 2f273cab6c..59f925fecf 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java
@@ -19,12 +19,12 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.util.ReflectionUtils;
@@ -37,7 +37,7 @@ import org.apache.hadoop.util.ReflectionUtils;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class FlushPolicyFactory {
- private static final Log LOG = LogFactory.getLog(FlushPolicyFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushPolicyFactory.class);
public static final String HBASE_FLUSH_POLICY_KEY = "hbase.regionserver.flush.policy";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
index e6ca462d8e..d56a1c2b2b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
@@ -27,8 +27,6 @@ import java.util.NavigableSet;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -59,6 +57,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.util.IdLock;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The store implementation to save MOBs (medium objects), it extends the HStore.
@@ -78,7 +78,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class HMobStore extends HStore {
- private static final Log LOG = LogFactory.getLog(HMobStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HMobStore.class);
private MobCacheConfig mobCacheConfig;
private Path homePath;
private Path mobFamilyPath;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 98e9df66c8..bb01fe8443 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -70,8 +70,6 @@ import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -198,6 +196,8 @@ import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.Nullable;
@@ -219,7 +219,7 @@ import edu.umd.cs.findbugs.annotations.Nullable;
@SuppressWarnings("deprecation")
@InterfaceAudience.Private
public class HRegion implements HeapSize, PropagatingConfigurationObserver, Region {
- private static final Log LOG = LogFactory.getLog(HRegion.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegion.class);
public static final String LOAD_CFS_ON_DEMAND_CONFIG_KEY =
"hbase.hregion.scan.loadColumnFamiliesOnDemand";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
index 4fc9ffe5cd..4788ac95c6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
@@ -28,8 +28,6 @@ import java.util.List;
import java.util.Optional;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -54,7 +52,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import edu.umd.cs.findbugs.annotations.Nullable;
@@ -65,7 +64,7 @@ import edu.umd.cs.findbugs.annotations.Nullable;
*/
@InterfaceAudience.Private
public class HRegionFileSystem {
- private static final Log LOG = LogFactory.getLog(HRegionFileSystem.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegionFileSystem.class);
/** Name of the region info file that resides just under the region directory. */
public final static String REGION_INFO_FILE = ".regioninfo";
@@ -820,7 +819,7 @@ public class HRegionFileSystem {
* @param LOG log to output information
* @throws IOException if an unexpected exception occurs
*/
- void logFileSystemState(final Log LOG) throws IOException {
+ void logFileSystemState(final Logger LOG) throws IOException {
FSUtils.logFileSystemState(fs, this.getRegionDir(), LOG);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index e2d6ba061b..cc6e073cbb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -53,8 +53,6 @@ import java.util.function.Function;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.SystemUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -113,6 +111,7 @@ import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.LoadBalancer;
import org.apache.hadoop.hbase.master.RegionState.State;
@@ -172,7 +171,8 @@ import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
@@ -224,7 +224,7 @@ public class HRegionServer extends HasThread implements
// Time to pause if master says 'please hold'. Make configurable if needed.
private static final int INIT_PAUSE_TIME_MS = 1000;
- private static final Log LOG = LogFactory.getLog(HRegionServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegionServer.class);
/**
* For testing only! Set to true to skip notifying region assignment to master .
@@ -1663,7 +1663,7 @@ public class HRegionServer extends HasThread implements
.setCurrentCompactedKVs(currentCompactedKVs)
.setDataLocality(dataLocality)
.setLastMajorCompactionTs(r.getOldestHfileTs(true));
- ((HRegion)r).setCompleteSequenceId(regionLoadBldr);
+ r.setCompleteSequenceId(regionLoadBldr);
return regionLoadBldr.build();
}
@@ -2198,7 +2198,7 @@ public class HRegionServer extends HasThread implements
@Override
public void postOpenDeployTasks(final PostOpenDeployContext context)
throws KeeperException, IOException {
- HRegion r = (HRegion) context.getRegion();
+ HRegion r = context.getRegion();
long masterSystemTime = context.getMasterSystemTime();
rpcServices.checkOpen();
LOG.info("Post open deploy tasks for " + r.getRegionInfo().getRegionNameAsString());
@@ -2223,7 +2223,7 @@ public class HRegionServer extends HasThread implements
+ r.getRegionInfo().getRegionNameAsString());
}
- triggerFlushInPrimaryRegion((HRegion)r);
+ triggerFlushInPrimaryRegion(r);
LOG.debug("Finished post open deploy task for " + r.getRegionInfo().getRegionNameAsString());
}
@@ -2373,15 +2373,15 @@ public class HRegionServer extends HasThread implements
public void abort(String reason, Throwable cause) {
String msg = "***** ABORTING region server " + this + ": " + reason + " *****";
if (cause != null) {
- LOG.fatal(msg, cause);
+ LOG.error(HBaseMarkers.FATAL, msg, cause);
} else {
- LOG.fatal(msg);
+ LOG.error(HBaseMarkers.FATAL, msg);
}
this.abortRequested = true;
// HBASE-4014: show list of coprocessors that were loaded to help debug
// regionserver crashes. Note that we're implicitly using
// java.util.HashSet's toString() method to print the coprocessor names.
- LOG.fatal("RegionServer abort: loaded coprocessors are: " +
+ LOG.error(HBaseMarkers.FATAL, "RegionServer abort: loaded coprocessors are: " +
CoprocessorHost.getLoadedCoprocessors());
// Try and dump metrics if abort -- might give clue as to how fatal came about....
try {
@@ -2631,7 +2631,8 @@ public class HRegionServer extends HasThread implements
} catch (ServiceException se) {
IOException ioe = ProtobufUtil.getRemoteException(se);
if (ioe instanceof ClockOutOfSyncException) {
- LOG.fatal("Master rejected startup because clock is out of sync", ioe);
+ LOG.error(HBaseMarkers.FATAL, "Master rejected startup because clock is out of sync",
+ ioe);
// Re-throw IOE will cause RS to abort
throw ioe;
} else if (ioe instanceof ServerNotRunningYetException) {
@@ -3197,7 +3198,7 @@ public class HRegionServer extends HasThread implements
Map> hstoreFiles = null;
Exception exceptionToThrow = null;
try{
- hstoreFiles = ((HRegion)regionToClose).close(false);
+ hstoreFiles = regionToClose.close(false);
} catch (Exception e) {
exceptionToThrow = e;
}
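
The HRegionServer hunks above also illustrate the second recurring change in this patch: SLF4J has no fatal() level, so former LOG.fatal(...) calls become LOG.error(...) tagged with the FATAL marker from the newly imported org.apache.hadoop.hbase.log.HBaseMarkers. A hedged sketch of the pattern, defining a local marker here only because HBaseMarkers itself is introduced elsewhere in the patch series:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    class AbortExample {
      private static final Logger LOG = LoggerFactory.getLogger(AbortExample.class);
      // Stand-in for HBaseMarkers.FATAL; the real constant lives in hbase-logging code added by this series.
      private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

      void abort(String reason, Throwable cause) {
        // Formerly LOG.fatal(msg, cause) under commons-logging.
        LOG.error(FATAL, "***** ABORTING: " + reason + " *****", cause);
      }
    }

Downstream, a marker-aware logging configuration can then route these messages as fatal-severity events even though the SLF4J call itself is error-level.
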
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
index c2e1111e19..d3509c2dbd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
@@ -18,11 +18,10 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LocalHBaseCluster;
@@ -34,7 +33,7 @@ import org.apache.hadoop.hbase.util.ServerCommandLine;
*/
@InterfaceAudience.Private
public class HRegionServerCommandLine extends ServerCommandLine {
- private static final Log LOG = LogFactory.getLog(HRegionServerCommandLine.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HRegionServerCommandLine.class);
private final Class<? extends HRegionServer> regionServerClass;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 80f91c8b68..a5d4b4d2e3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -48,8 +48,7 @@ import java.util.function.Predicate;
import java.util.function.ToLongFunction;
import java.util.stream.Collectors;
import java.util.stream.LongStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -77,6 +76,7 @@ import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.io.hfile.InvalidHFileException;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
@@ -98,7 +98,8 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection;
@@ -133,7 +134,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
public static final int DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER = 1000;
public static final int DEFAULT_BLOCKING_STOREFILE_COUNT = 10;
- private static final Log LOG = LogFactory.getLog(HStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HStore.class);
protected final MemStore memstore;
// This stores directory in the filesystem.
@@ -2221,7 +2222,8 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
try {
sf.deleteStoreFile();
} catch (IOException deleteEx) {
- LOG.fatal("Failed to delete store file we committed, halting " + pathToDelete, ex);
+ LOG.error(HBaseMarkers.FATAL, "Failed to delete store file we committed, "
+ + "halting " + pathToDelete, ex);
Runtime.getRuntime().halt(1);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
index b405c86aab..93e59cfe1f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
@@ -26,8 +26,6 @@ import java.util.OptionalLong;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -60,7 +59,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public class HStoreFile implements StoreFile {
- private static final Log LOG = LogFactory.getLog(HStoreFile.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(HStoreFile.class.getName());
public static final String STORE_FILE_READER_NO_READAHEAD = "hbase.store.reader.no-readahead";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java
index cfdb32dd8d..abd9b46133 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java
@@ -25,14 +25,14 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Server;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class HeapMemoryManager {
- private static final Log LOG = LogFactory.getLog(HeapMemoryManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HeapMemoryManager.class);
private static final int CONVERT_TO_PERCENTAGE = 100;
private static final int CLUSTER_MINIMUM_MEMORY_THRESHOLD =
(int) (CONVERT_TO_PERCENTAGE * HConstants.HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
index 3164e1c83f..19a63b45d2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
@@ -45,8 +45,9 @@ import org.apache.hadoop.hbase.procedure2.util.StringUtils;
*/
@InterfaceAudience.Private
public class IncreasingToUpperBoundRegionSplitPolicy extends ConstantSizeRegionSplitPolicy {
+ private static final Logger LOG =
+ LoggerFactory.getLogger(IncreasingToUpperBoundRegionSplitPolicy.class);
- private static final Log LOG = LogFactory.getLog(IncreasingToUpperBoundRegionSplitPolicy.class);
protected long initialSize;
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java
index 634bd88e0c..660da57080 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java
@@ -19,9 +19,9 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A custom RegionSplitPolicy implementing a SplitPolicy that groups
@@ -32,8 +32,8 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class KeyPrefixRegionSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy {
- private static final Log LOG = LogFactory
- .getLog(KeyPrefixRegionSplitPolicy.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(KeyPrefixRegionSplitPolicy.class);
@Deprecated
public static final String PREFIX_LENGTH_KEY_DEPRECATED = "prefix_split_key_policy.prefix_length";
public static final String PREFIX_LENGTH_KEY = "KeyPrefixRegionSplitPolicy.prefix_length";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
index f26575d6c3..cdd92a6e10 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
@@ -27,11 +27,11 @@ import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
/**
@@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
@InterfaceAudience.Private
public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner
implements KeyValueScanner, InternalScanner {
- private static final Log LOG = LogFactory.getLog(KeyValueHeap.class);
+ private static final Logger LOG = LoggerFactory.getLogger(KeyValueHeap.class);
protected PriorityQueue<KeyValueScanner> heap = null;
// Holds the scanners when a ever a eager close() happens. All such eagerly closed
// scans are collected and when the final scanner.close() happens will perform the
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java
index a91a27156a..f7ee4ef32e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java
@@ -18,12 +18,7 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.HasThread;
-
+import java.io.IOException;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.Map;
@@ -31,7 +26,12 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;
-import java.io.IOException;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.HasThread;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Leases
@@ -54,7 +54,7 @@ import java.io.IOException;
*/
@InterfaceAudience.Private
public class Leases extends HasThread {
- private static final Log LOG = LogFactory.getLog(Leases.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(Leases.class.getName());
public static final int MIN_WAIT_TIME = 100;
private final Map<String, Lease> leases = new ConcurrentHashMap<>();
@@ -63,7 +63,7 @@ public class Leases extends HasThread {
/**
* Creates a lease monitor
- *
+ *
* @param leaseCheckFrequency - how often the lease should be checked
* (milliseconds)
*/
@@ -98,7 +98,7 @@ public class Leases extends HasThread {
} catch (ConcurrentModificationException e) {
continue;
} catch (Throwable e) {
- LOG.fatal("Unexpected exception killed leases thread", e);
+ LOG.error(HBaseMarkers.FATAL, "Unexpected exception killed leases thread", e);
break;
}
@@ -291,11 +291,13 @@ public class Leases extends HasThread {
return this.leaseName.hashCode();
}
+ @Override
public long getDelay(TimeUnit unit) {
return unit.convert(this.expirationTime - EnvironmentEdgeManager.currentTime(),
TimeUnit.MILLISECONDS);
}
+ @Override
public int compareTo(Delayed o) {
long delta = this.getDelay(TimeUnit.MILLISECONDS) -
o.getDelay(TimeUnit.MILLISECONDS);
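The hunk above only adds @Override to Lease's getDelay and compareTo, but the surrounding context shows the java.util.concurrent.Delayed contract those methods satisfy. A self-contained sketch of that contract, using illustrative names and System.currentTimeMillis() in place of EnvironmentEdgeManager.currentTime():

import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;

// Sketch only: ExpiringEntry is not part of the patch.
final class ExpiringEntry implements Delayed {
  private final long expirationTime;

  ExpiringEntry(long expirationTime) {
    this.expirationTime = expirationTime;
  }

  @Override
  public long getDelay(TimeUnit unit) {
    // Remaining time until expiry, converted to whatever unit the queue asks for.
    return unit.convert(expirationTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
  }

  @Override
  public int compareTo(Delayed o) {
    // Order by remaining delay so a DelayQueue releases the soonest entry first.
    long delta = getDelay(TimeUnit.MILLISECONDS) - o.getDelay(TimeUnit.MILLISECONDS);
    return delta > 0 ? 1 : delta < 0 ? -1 : 0;
  }
}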
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
index 451b886985..d4561ed895 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
@@ -25,8 +25,6 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
@@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
@VisibleForTesting
public class LogRoller extends HasThread implements Closeable {
- private static final Log LOG = LogFactory.getLog(LogRoller.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LogRoller.class);
private final ReentrantLock rollLock = new ReentrantLock();
private final AtomicBoolean rollLog = new AtomicBoolean(false);
private final ConcurrentHashMap<WAL, Boolean> walNeedsRoll = new ConcurrentHashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java
index b262328d72..42302b2a5c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java
@@ -18,10 +18,10 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* MemStoreCompactionStrategy is the root of a class hierarchy which defines the strategy for
@@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public abstract class MemStoreCompactionStrategy {
- protected static final Log LOG = LogFactory.getLog(MemStoreCompactionStrategy.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(MemStoreCompactionStrategy.class);
// The upper bound for the number of segments we store in the pipeline prior to merging.
public static final String COMPACTING_MEMSTORE_THRESHOLD_KEY =
"hbase.hregion.compacting.pipeline.segments.limit";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java
index 4d97411a77..300218e16f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java
@@ -18,19 +18,20 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.io.IOException;
+import java.util.concurrent.atomic.AtomicBoolean;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MemoryCompactionPolicy;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
* The ongoing MemStore Compaction manager, dispatches a solo running compaction and interrupts
@@ -55,7 +56,7 @@ public class MemStoreCompactor {
+ ClassSize.ATOMIC_BOOLEAN // isInterrupted (the internals)
);
- private static final Log LOG = LogFactory.getLog(MemStoreCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MemStoreCompactor.class);
private CompactingMemStore compactingMemStore;
// a static version of the segment list from the pipeline
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java
index 0f96936d98..b081d77330 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java
@@ -25,14 +25,13 @@ import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
/**
@@ -41,8 +40,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
*/
@InterfaceAudience.Private
public class MemStoreCompactorSegmentsIterator extends MemStoreSegmentsIterator {
-
- private static final Log LOG = LogFactory.getLog(MemStoreCompactorSegmentsIterator.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(MemStoreCompactorSegmentsIterator.class);
private final List<Cell> kvs = new ArrayList<>();
private boolean hasMore = true;
@@ -84,6 +83,7 @@ public class MemStoreCompactorSegmentsIterator extends MemStoreSegmentsIterator
return kvsIterator.next();
}
+ @Override
public void close() {
try {
compactingScanner.close();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
index a314848cab..9e352ef428 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
@@ -37,8 +37,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.ReentrantReadWriteLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HConstants;
@@ -55,6 +53,8 @@ import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Thread that flushes cache on request
@@ -67,7 +67,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
class MemStoreFlusher implements FlushRequester {
- private static final Log LOG = LogFactory.getLog(MemStoreFlusher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MemStoreFlusher.class);
private Configuration conf;
// These two data members go together. Any entry in the one must have
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java
index 08588d26e8..6a253faf36 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java
@@ -28,14 +28,13 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/**
@@ -66,7 +65,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Private
public class MemStoreLABImpl implements MemStoreLAB {
- static final Log LOG = LogFactory.getLog(MemStoreLABImpl.class);
+ static final Logger LOG = LoggerFactory.getLogger(MemStoreLABImpl.class);
private AtomicReference<Chunk> curChunk = new AtomicReference<>();
// Lock to manage multiple handlers requesting for a chunk
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
index f65bb66a9e..b643ecfb37 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
@@ -26,8 +26,6 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
@@ -46,6 +44,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;
import org.apache.hadoop.metrics2.MetricsExecutor;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Impl for exposing HRegionServer Information through Hadoop's metrics 2 system.
@@ -54,7 +54,7 @@ import org.apache.yetus.audience.InterfaceAudience;
class MetricsRegionServerWrapperImpl
implements MetricsRegionServerWrapper {
- private static final Log LOG = LogFactory.getLog(MetricsRegionServerWrapperImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionServerWrapperImpl.class);
private final HRegionServer regionServer;
private final MetricsWALSource metricsWALSource;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
index 7f37bbf9b2..2aa1a82dc0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
@@ -27,19 +27,19 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.metrics2.MetricsExecutor;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable {
- private static final Log LOG = LogFactory.getLog(MetricsRegionWrapperImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionWrapperImpl.class);
public static final int PERIOD = 45;
public static final String UNKNOWN = "unknown";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java
index 07ff281ce2..a25ef3b6c2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java
@@ -24,9 +24,9 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
import java.util.LinkedList;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.util.ClassSize;
*/
@InterfaceAudience.Private
public class MultiVersionConcurrencyControl {
- private static final Log LOG = LogFactory.getLog(MultiVersionConcurrencyControl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiVersionConcurrencyControl.class);
final AtomicLong readPoint = new AtomicLong(0);
final AtomicLong writePoint = new AtomicLong(0);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index a77856aaa6..ccf61568f0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -44,8 +44,6 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
import org.apache.commons.lang3.mutable.MutableObject;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ByteBufferCell;
@@ -99,6 +97,7 @@ import org.apache.hadoop.hbase.ipc.RpcServerFactory;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.master.MasterRpcServices;
import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;
import org.apache.hadoop.hbase.quotas.OperationQuota;
@@ -129,6 +128,8 @@ import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.Cache;
@@ -234,7 +235,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe
public class RSRpcServices implements HBaseRPCErrorHandler,
AdminService.BlockingInterface, ClientService.BlockingInterface, PriorityFunction,
ConfigurationObserver {
- protected static final Log LOG = LogFactory.getLog(RSRpcServices.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(RSRpcServices.class);
/** RPC scheduler to use for the region server. */
public static final String REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS =
@@ -793,7 +794,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
try {
Get get = ProtobufUtil.toGet(action.getGet());
if (context != null) {
- r = get(get, ((HRegion) region), closeCallBack, context);
+ r = get(get, (region), closeCallBack, context);
} else {
r = region.get(get);
}
@@ -1051,7 +1052,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
for (Cell metaCell : metaCells) {
CompactionDescriptor compactionDesc = WALEdit.getCompaction(metaCell);
boolean isDefaultReplica = RegionReplicaUtil.isDefaultReplica(region.getRegionInfo());
- HRegion hRegion = (HRegion)region;
+ HRegion hRegion = region;
if (compactionDesc != null) {
// replay the compaction. Remove the files from stores only if we are the primary
// region replica (thus own the files)
@@ -1485,8 +1486,9 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
|| (e.getMessage() != null && e.getMessage().contains(
"java.lang.OutOfMemoryError"))) {
stop = true;
- LOG.fatal("Run out of memory; " + RSRpcServices.class.getSimpleName()
- + " will abort itself immediately", e);
+ LOG.error(HBaseMarkers.FATAL, "Run out of memory; "
+ + RSRpcServices.class.getSimpleName() + " will abort itself immediately",
+ e);
}
} finally {
if (stop) {
@@ -1551,7 +1553,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
try {
checkOpen();
requestCount.increment();
- HRegion region = (HRegion) getRegion(request.getRegion());
+ HRegion region = getRegion(request.getRegion());
// Quota support is enabled, the requesting user is not system/super user
// and a quota policy is enforced that disables compactions.
if (QuotaUtil.isQuotaEnabled(getConfiguration()) &&
@@ -1598,7 +1600,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
try {
checkOpen();
requestCount.increment();
- HRegion region = (HRegion) getRegion(request.getRegion());
+ HRegion region = getRegion(request.getRegion());
LOG.info("Flushing " + region.getRegionInfo().getRegionNameAsString());
boolean shouldFlush = true;
if (request.hasIfOlderThanTs()) {
@@ -1663,7 +1665,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
RegionInfo info = region.getRegionInfo();
byte[] bestSplitRow = null;
if (request.hasBestSplitRow() && request.getBestSplitRow()) {
- HRegion r = (HRegion) region;
+ HRegion r = region;
region.startRegionOperation(Operation.SPLIT_REGION);
r.forceSplit(null);
bestSplitRow = r.checkSplit();
@@ -2371,7 +2373,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
}
if (existence == null) {
if (context != null) {
- r = get(clientGet, ((HRegion) region), null, context);
+ r = get(clientGet, (region), null, context);
} else {
// for test purpose
r = region.get(clientGet);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 1717093ca9..fb87e51f74 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -32,8 +32,6 @@ import java.util.regex.Matcher;
import org.apache.commons.collections4.map.AbstractReferenceMap;
import org.apache.commons.collections4.map.ReferenceMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -90,6 +88,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Implements the coprocessor environment and runtime support for coprocessors
@@ -99,7 +99,7 @@ import org.apache.yetus.audience.InterfaceAudience;
public class RegionCoprocessorHost
extends CoprocessorHost {
- private static final Log LOG = LogFactory.getLog(RegionCoprocessorHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionCoprocessorHost.class);
// The shared data map
private static final ReferenceMap<String, ConcurrentMap<String, Object>> SHARED_DATA_MAP =
new ReferenceMap<>(AbstractReferenceMap.ReferenceStrength.HARD,
@@ -141,6 +141,7 @@ public class RegionCoprocessorHost
return region;
}
+ @Override
public OnlineRegions getOnlineRegions() {
return this.services;
}
@@ -208,6 +209,7 @@ public class RegionCoprocessorHost
* @return An instance of RegionServerServices, an object NOT for general user-space Coprocessor
* consumption.
*/
+ @Override
public RegionServerServices getRegionServerServices() {
return this.rsServices;
}
@@ -551,7 +553,7 @@ public class RegionCoprocessorHost
}
});
} catch (IOException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
}
}
@@ -586,7 +588,7 @@ public class RegionCoprocessorHost
}
});
} catch (IOException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
}
}
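The two LOG.warn(e) rewrites above come from an API difference rather than a behaviour change: commons-logging's warn(Object) stringified whatever it was given, while SLF4J's warn() wants a String message plus an optional Throwable. A minimal sketch of the rewritten form (WarnRewriteSketch is an illustrative name):

import java.io.IOException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Sketch only: not part of the patch.
final class WarnRewriteSketch {
  private static final Logger LOG = LoggerFactory.getLogger(WarnRewriteSketch.class);

  static void handle(IOException e) {
    // The exception text becomes the message, and passing the exception again
    // preserves the stack trace that LOG.warn(e) used to print.
    LOG.warn(e.toString(), e);
  }

  private WarnRewriteSketch() {}
}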
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
index 9395b2e1c6..dc1708cfbc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
@@ -21,8 +21,7 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import com.google.protobuf.Service;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SharedConnection;
@@ -41,12 +40,14 @@ import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.security.User;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class RegionServerCoprocessorHost extends
CoprocessorHost {
- private static final Log LOG = LogFactory.getLog(RegionServerCoprocessorHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionServerCoprocessorHost.class);
private RegionServerServices rsServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
index 89847f977a..264d9858de 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
@@ -26,8 +26,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -53,7 +51,8 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
@@ -94,7 +93,7 @@ public class SecureBulkLoadManager {
private static final int RANDOM_WIDTH = 320;
private static final int RANDOM_RADIX = 32;
- private static final Log LOG = LogFactory.getLog(SecureBulkLoadManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SecureBulkLoadManager.class);
private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx");
private final static FsPermission PERM_HIDDEN = FsPermission.valueOf("-rwx--x--x");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java
index c054666395..121cbcae62 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java
@@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
+import java.util.Objects;
import java.util.SortedSet;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.PrivateCellUtil;
@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -343,9 +343,9 @@ public abstract class Segment {
/**
* Dumps all cells of the segment into the given log
*/
- void dump(Log log) {
+ void dump(Logger log) {
for (Cell cell: getCellSet()) {
- log.debug(cell);
+ log.debug(Objects.toString(cell));
}
}
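Segment.dump(Logger) above needs Objects.toString for the same reason: Log.debug(Object) accepted a Cell directly, whereas SLF4J's debug() takes a String. A small sketch of the null-safe conversion, with String standing in for Cell and illustrative names:

import java.util.List;
import java.util.Objects;

import org.slf4j.Logger;

// Sketch only: CellDumpSketch is not part of the patch.
final class CellDumpSketch {

  static void dump(List<String> cells, Logger log) {
    for (String cell : cells) {
      // Objects.toString(null) returns "null", matching commons-logging's
      // tolerant handling of a null argument to debug(Object).
      log.debug(Objects.toString(cell));
    }
  }

  private CellDumpSketch() {}
}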
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
index fe6d01961e..1b93df936b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
@@ -23,13 +23,13 @@ import java.util.Date;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.NonceKey;
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public class ServerNonceManager {
public static final String HASH_NONCE_GRACE_PERIOD_KEY = "hbase.server.hashNonce.gracePeriod";
- private static final Log LOG = LogFactory.getLog(ServerNonceManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ServerNonceManager.class);
/** The time to wait in an extremely unlikely case of a conflict with a running op.
* Only here so that tests could override it and not wait. */
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
index 58d6327a05..c4335f66d5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
@@ -23,13 +23,14 @@ import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.Stoppable;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.util.ShutdownHookManager;
import org.apache.hadoop.hbase.util.Threads;
@@ -39,7 +40,7 @@ import org.apache.hadoop.hbase.util.Threads;
*/
@InterfaceAudience.Private
public class ShutdownHook {
- private static final Log LOG = LogFactory.getLog(ShutdownHook.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ShutdownHook.class);
private static final String CLIENT_FINALIZER_DATA_METHOD = "clientFinalizer";
/**
@@ -211,10 +212,12 @@ public class ShutdownHook {
}
return hdfsClientFinalizer;
} catch (NoSuchFieldException nsfe) {
- LOG.fatal("Couldn't find field 'clientFinalizer' in FileSystem!", nsfe);
+ LOG.error(HBaseMarkers.FATAL, "Couldn't find field 'clientFinalizer' in FileSystem!",
+ nsfe);
throw new RuntimeException("Failed to suppress HDFS shutdown hook");
} catch (IllegalAccessException iae) {
- LOG.fatal("Couldn't access field 'clientFinalizer' in FileSystem!", iae);
+ LOG.error(HBaseMarkers.FATAL, "Couldn't access field 'clientFinalizer' in FileSystem!",
+ iae);
throw new RuntimeException("Failed to suppress HDFS shutdown hook");
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
index 924b8fe4c9..067ad920d5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
@@ -23,9 +23,9 @@ import java.io.InterruptedIOException;
import java.net.ConnectException;
import java.net.SocketTimeoutException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -61,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public class SplitLogWorker implements Runnable {
- private static final Log LOG = LogFactory.getLog(SplitLogWorker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SplitLogWorker.class);
Thread worker;
// thread pool which executes recovery work
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java
index 4b1ae31233..ce5c05119a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.regionserver;
import java.security.PrivilegedAction;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -30,7 +28,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTran
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
@@ -39,7 +38,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
*/
@InterfaceAudience.Private
class SplitRequest implements Runnable {
- private static final Log LOG = LogFactory.getLog(SplitRequest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SplitRequest.class);
private final RegionInfo parent;
private final byte[] midKey;
private final HRegionServer server;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
index 5b83acee4f..a1fe2d1e22 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
@@ -25,14 +25,14 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.io.HalfStoreFileReader;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
*/
@InterfaceAudience.Private
public class StoreFileInfo {
- private static final Log LOG = LogFactory.getLog(StoreFileInfo.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StoreFileInfo.class);
/**
* A non-capture group, for hfiles, so that this can be embedded.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
index a9d9292c18..924e285700 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
@@ -28,8 +28,6 @@ import java.util.Optional;
import java.util.SortedSet;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -53,7 +51,8 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -62,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.PHOENIX)
@InterfaceStability.Evolving
public class StoreFileReader {
- private static final Log LOG = LogFactory.getLog(StoreFileReader.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(StoreFileReader.class.getName());
protected BloomFilter generalBloomFilter = null;
protected BloomFilter deleteFamilyBloomFilter = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
index 26977e4060..ecc812e14d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
@@ -30,8 +30,6 @@ import java.net.InetSocketAddress;
import java.util.UUID;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -52,7 +50,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.RowBloomContext;
import org.apache.hadoop.hbase.util.RowColBloomContext;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/**
@@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
*/
@InterfaceAudience.Private
public class StoreFileWriter implements CellSink, ShipperListener {
- private static final Log LOG = LogFactory.getLog(StoreFileWriter.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(StoreFileWriter.class.getName());
private static final Pattern dash = Pattern.compile("-");
private final BloomFilterWriter generalBloomFilterWriter;
private final BloomFilterWriter deleteFamilyBloomFilterWriter;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 571e2c0e3c..bdcf046c43 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -27,8 +27,6 @@ import java.util.OptionalInt;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
@@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public class StoreScanner extends NonReversedNonLazyKeyValueScanner
implements KeyValueScanner, InternalScanner, ChangedReadersObserver {
- private static final Log LOG = LogFactory.getLog(StoreScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StoreScanner.class);
// In unit tests, the store could be null
protected final HStore store;
private final CellComparator comparator;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java
index 2ada5a99f9..a32a49302b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java
@@ -24,12 +24,12 @@ import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility functions for region server storage layer.
@@ -37,7 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class StoreUtils {
- private static final Log LOG = LogFactory.getLog(StoreUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StoreUtils.class);
/**
* Creates a deterministic hash code for store file collection.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java
index 576aea1f8f..18f7e185ee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java
@@ -23,11 +23,11 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.util.StringUtils;
@@ -44,7 +44,7 @@ import org.apache.hadoop.util.StringUtils;
@InterfaceAudience.Private
public class StorefileRefresherChore extends ScheduledChore {
- private static final Log LOG = LogFactory.getLog(StorefileRefresherChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StorefileRefresherChore.class);
/**
* The period (in milliseconds) for refreshing the store files for the secondary regions.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java
index 732fc061e8..fc0598d89a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java
@@ -24,13 +24,13 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
public abstract class StripeMultiFileWriter extends AbstractMultiFileWriter {
- private static final Log LOG = LogFactory.getLog(StripeMultiFileWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeMultiFileWriter.class);
protected final CellComparator comparator;
protected List<StoreFileWriter> existingWriters;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java
index eb2a9b6d96..61deb0b93c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java
@@ -18,9 +18,9 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
*/
@InterfaceAudience.Private
public class StripeStoreConfig {
- private static final Log LOG = LogFactory.getLog(StripeStoreConfig.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeStoreConfig.class);
/** The maximum number of files to compact within a stripe; same as for regular compaction. */
public static final String MAX_FILES_KEY = "hbase.store.stripe.compaction.maxFiles";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
index 8c2636355c..03874e1d4c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
@@ -21,14 +21,14 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor;
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class StripeStoreEngine extends StoreEngine<StripeStoreFlusher,
    StripeCompactionPolicy, StripeCompactor, StripeStoreFileManager> {
- private static final Log LOG = LogFactory.getLog(StripeStoreEngine.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeStoreEngine.class);
private StripeStoreConfig config;
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
index 737e1a6df5..6a5e84c74a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
@@ -31,8 +31,6 @@ import java.util.Map;
import java.util.Optional;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
@@ -44,7 +42,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ConcatenatedLists;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
@@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
@InterfaceAudience.Private
public class StripeStoreFileManager
implements StoreFileManager, StripeCompactionPolicy.StripeInformationProvider {
- private static final Log LOG = LogFactory.getLog(StripeStoreFileManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeStoreFileManager.class);
/**
* The file metadata fields that contain the stripe information.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
index a227979e57..d2333451e3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
@@ -24,8 +24,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
@@ -33,7 +31,8 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class StripeStoreFlusher extends StoreFlusher {
- private static final Log LOG = LogFactory.getLog(StripeStoreFlusher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeStoreFlusher.class);
private final Object flushLock = new Object();
private final StripeCompactionPolicy policy;
private final StripeCompactionPolicy.StripeInformationProvider stripes;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java
index c5ef127866..a8ffc2e4ea 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.regionserver.compactions;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.regionserver.StoreScanner;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Base class for implementing a Compactor which will generate multiple output files after
@@ -40,7 +40,7 @@ import org.apache.yetus.audience.InterfaceAudience;
public abstract class AbstractMultiOutputCompactor<T extends AbstractMultiFileWriter>
extends Compactor<T> {
- private static final Log LOG = LogFactory.getLog(AbstractMultiOutputCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractMultiOutputCompactor.class);
public AbstractMultiOutputCompactor(Configuration conf, HStore store) {
super(conf, store);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
index b8194eb55b..d2a86c1c72 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java
@@ -19,11 +19,11 @@
package org.apache.hadoop.hbase.regionserver.compactions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
/**
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
@InterfaceAudience.Private
public class CompactionConfiguration {
- private static final Log LOG = LogFactory.getLog(CompactionConfiguration.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompactionConfiguration.class);
public static final String HBASE_HSTORE_COMPACTION_RATIO_KEY = "hbase.hstore.compaction.ratio";
public static final String HBASE_HSTORE_COMPACTION_RATIO_OFFPEAK_KEY =
@@ -142,7 +142,7 @@ public class CompactionConfiguration {
this.dateTieredCompactionWindowFactory = conf.get(
DATE_TIERED_COMPACTION_WINDOW_FACTORY_CLASS_KEY,
DEFAULT_DATE_TIERED_COMPACTION_WINDOW_FACTORY_CLASS.getName());
- LOG.info(this);
+ LOG.info(toString());
}
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index 014d4d1daf..9703c3b528 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
@@ -29,8 +29,6 @@ import java.util.Collection;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -62,7 +60,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
/**
@@ -71,7 +70,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
*/
@InterfaceAudience.Private
public abstract class Compactor<T extends CellSink> {
- private static final Log LOG = LogFactory.getLog(Compactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Compactor.class);
protected static final long COMPACTION_PROGRESS_LOG_INTERVAL = 60 * 1000;
protected volatile CompactionProgress progress;
protected final Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
index a0c3e309c9..a6f1b9eb4b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
@@ -25,8 +25,6 @@ import java.util.Collections;
import java.util.List;
import java.util.OptionalLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
@@ -38,7 +36,8 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterators;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.math.LongMath;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class DateTieredCompactionPolicy extends SortedCompactionPolicy {
- private static final Log LOG = LogFactory.getLog(DateTieredCompactionPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DateTieredCompactionPolicy.class);
private final RatioBasedCompactionPolicy compactionPolicyPerWindow;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java
index 09dda90d90..21eaa941cd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java
@@ -21,8 +21,6 @@ import java.io.IOException;
import java.util.List;
import java.util.OptionalLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.DateTieredMultiFileWriter;
@@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.security.User;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This compactor will generate StoreFile for different time ranges.
@@ -39,7 +39,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class DateTieredCompactor extends AbstractMultiOutputCompactor<DateTieredMultiFileWriter> {
- private static final Log LOG = LogFactory.getLog(DateTieredCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DateTieredCompactor.class);
public DateTieredCompactor(Configuration conf, HStore store) {
super(conf, store);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java
index 14539b0e2e..41b819b5d5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java
@@ -21,8 +21,6 @@ import java.io.IOException;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.HStore;
@@ -33,7 +31,8 @@ import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController
import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.security.User;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
@InterfaceAudience.Private
public class DefaultCompactor extends Compactor<StoreFileWriter> {
- private static final Log LOG = LogFactory.getLog(DefaultCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultCompactor.class);
public DefaultCompactor(Configuration conf, HStore store) {
super(conf, store);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java
index b0942f6089..d9d10d98ae 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java
@@ -23,12 +23,12 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Class to pick which files if any to compact together.
@@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class ExploringCompactionPolicy extends RatioBasedCompactionPolicy {
- private static final Log LOG = LogFactory.getLog(ExploringCompactionPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExploringCompactionPolicy.class);
/**
* Constructor for ExploringCompactionPolicy.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java
index 67c7a24aab..6d2245cd7b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java
@@ -17,21 +17,21 @@
*/
package org.apache.hadoop.hbase.regionserver.compactions;
-import org.apache.hadoop.hbase.shaded.com.google.common.math.LongMath;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.math.LongMath;
/**
* Exponential compaction window implementation.
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class ExponentialCompactionWindowFactory extends CompactionWindowFactory {
-
- private static final Log LOG = LogFactory.getLog(ExponentialCompactionWindowFactory.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ExponentialCompactionWindowFactory.class);
public static final String BASE_WINDOW_MILLIS_KEY =
"hbase.hstore.compaction.date.tiered.base.window.millis";
@@ -128,7 +128,7 @@ public class ExponentialCompactionWindowFactory extends CompactionWindowFactory
windowsPerTier = conf.getInt(WINDOWS_PER_TIER_KEY, 4);
maxTierAgeMillis = conf.getLong(MAX_TIER_AGE_MILLIS_KEY,
comConf.getDateTieredMaxStoreFileAgeMillis());
- LOG.info(this);
+ LOG.info(toString());
}
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java
index 032a9c614d..32b40e1b05 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java
@@ -23,14 +23,14 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
*
@@ -47,7 +47,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class FIFOCompactionPolicy extends ExploringCompactionPolicy {
- private static final Log LOG = LogFactory.getLog(FIFOCompactionPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FIFOCompactionPolicy.class);
public FIFOCompactionPolicy(Configuration conf, StoreConfigInformation storeConfigInfo) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java
index 3cb8843952..b920de2b57 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java
@@ -17,14 +17,14 @@
*/
package org.apache.hadoop.hbase.regionserver.compactions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@InterfaceAudience.Private
public abstract class OffPeakHours {
- private static final Log LOG = LogFactory.getLog(OffPeakHours.class);
+ private static final Logger LOG = LoggerFactory.getLogger(OffPeakHours.class);
public static final OffPeakHours DISABLED = new OffPeakHours() {
@Override public boolean isOffPeakHour() { return false; }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
index dba0473419..a6ea9b22f4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
@@ -24,8 +24,6 @@ import java.util.Collection;
import java.util.List;
import java.util.OptionalLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.regionserver.HStore;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The default algorithm for selecting files for compaction.
@@ -43,7 +43,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class RatioBasedCompactionPolicy extends SortedCompactionPolicy {
- private static final Log LOG = LogFactory.getLog(RatioBasedCompactionPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RatioBasedCompactionPolicy.class);
public RatioBasedCompactionPolicy(Configuration conf,
StoreConfigInformation storeConfigInfo) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java
index f284489eaa..d9b3dd45fd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java
@@ -17,14 +17,13 @@ import java.util.List;
import java.util.OptionalInt;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Collections2;
@@ -36,7 +35,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@InterfaceAudience.Private
public abstract class SortedCompactionPolicy extends CompactionPolicy {
- private static final Log LOG = LogFactory.getLog(SortedCompactionPolicy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SortedCompactionPolicy.class);
public SortedCompactionPolicy(Configuration conf, StoreConfigInformation storeConfigInfo) {
super(conf, storeConfigInfo);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
index b6de6783bc..053920dd1c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
@@ -25,8 +25,6 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
@@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.util.ConcatenatedLists;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
/**
@@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
*/
@InterfaceAudience.Private
public class StripeCompactionPolicy extends CompactionPolicy {
- private final static Log LOG = LogFactory.getLog(StripeCompactionPolicy.class);
+ private final static Logger LOG = LoggerFactory.getLogger(StripeCompactionPolicy.class);
// Policy used to compact individual stripes.
private ExploringCompactionPolicy stripePolicy = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
index c9e591ea43..41e0a71b49 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.regionserver.compactions;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.HStore;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This is the placeholder for stripe compactor. The implementation, as well as the proper javadoc,
@@ -41,7 +41,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class StripeCompactor extends AbstractMultiOutputCompactor<StripeMultiFileWriter> {
- private static final Log LOG = LogFactory.getLog(StripeCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(StripeCompactor.class);
public StripeCompactor(Configuration conf, HStore store) {
super(conf, store);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java
index f48ee9260e..7583b726af 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.regionserver.handler;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
@@ -32,7 +30,8 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
/**
@@ -45,7 +44,7 @@ public class CloseRegionHandler extends EventHandler {
// after the user regions have closed. What
// about the case where master tells us to shutdown a catalog region and we
// have a running queue of user regions to close?
- private static final Log LOG = LogFactory.getLog(CloseRegionHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CloseRegionHandler.class);
private final RegionServerServices rsServices;
private final RegionInfo regionInfo;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java
index e4b3ed2d09..f408629534 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.regionserver.handler;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployC
import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
/**
* Handles opening of a region on a region server.
@@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
*/
@InterfaceAudience.Private
public class OpenRegionHandler extends EventHandler {
- private static final Log LOG = LogFactory.getLog(OpenRegionHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(OpenRegionHandler.class);
protected final RegionServerServices rsServices;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java
index 9f66be9ba6..ed1b2c760f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.regionserver.handler;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.executor.EventHandler;
import org.apache.hadoop.hbase.executor.EventType;
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
*/
@InterfaceAudience.Private
public class ParallelSeekHandler extends EventHandler {
- private static final Log LOG = LogFactory.getLog(ParallelSeekHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ParallelSeekHandler.class);
private KeyValueScanner scanner;
private Cell keyValue;
private long readPoint;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java
index 0d13aafe08..b917379930 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java
@@ -21,13 +21,13 @@ package org.apache.hadoop.hbase.regionserver.handler;
import java.io.IOException;
import java.io.InterruptedIOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.FlushRegionCallable;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
@@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
@InterfaceAudience.Private
public class RegionReplicaFlushHandler extends EventHandler {
- private static final Log LOG = LogFactory.getLog(RegionReplicaFlushHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionReplicaFlushHandler.class);
private final ClusterConnection connection;
private final RpcRetryingCallerFactory rpcRetryingCallerFactory;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java
index 07e7de0819..49ab574ec5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.regionserver.handler;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SplitLogCounters;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.CancelableProgressable;
*/
@InterfaceAudience.Private
public class WALSplitterHandler extends EventHandler {
- private static final Log LOG = LogFactory.getLog(WALSplitterHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALSplitterHandler.class);
private final ServerName serverName;
private final CancelableProgressable reporter;
private final AtomicInteger inProgressTasks;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
index 13ab8c86e4..a20a001e27 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
@@ -21,10 +21,10 @@ import java.io.IOException;
import java.util.List;
import java.util.concurrent.Callable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class FlushSnapshotSubprocedure extends Subprocedure {
- private static final Log LOG = LogFactory.getLog(FlushSnapshotSubprocedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushSnapshotSubprocedure.class);
private final List<HRegion> regions;
private final SnapshotDescription snapshot;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
index 6a7d83ebe4..08335ab4f1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
@@ -30,8 +30,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.DaemonThreadFactory;
@@ -58,6 +56,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This manager class handles the work dealing with snapshots for a {@link HRegionServer}.
@@ -75,7 +75,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
@InterfaceStability.Unstable
public class RegionServerSnapshotManager extends RegionServerProcedureManager {
- private static final Log LOG = LogFactory.getLog(RegionServerSnapshotManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionServerSnapshotManager.class);
/** Maximum number of snapshot region tasks that can run concurrently */
private static final String CONCURENT_SNAPSHOT_TASKS_KEY = "hbase.snapshot.region.concurrentTasks";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java
index 55def0792b..45e7267ed2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java
@@ -17,18 +17,18 @@
*/
package org.apache.hadoop.hbase.regionserver.throttle;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.util.ReflectionUtils;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public final class CompactionThroughputControllerFactory {
-
- private static final Log LOG = LogFactory.getLog(CompactionThroughputControllerFactory.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(CompactionThroughputControllerFactory.class);
public static final String HBASE_THROUGHPUT_CONTROLLER_KEY =
"hbase.regionserver.throughput.controller";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java
index 6311952bde..fc75c58358 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java
@@ -17,18 +17,18 @@
*/
package org.apache.hadoop.hbase.regionserver.throttle;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.util.ReflectionUtils;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public final class FlushThroughputControllerFactory {
- private static final Log LOG = LogFactory.getLog(FlushThroughputControllerFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlushThroughputControllerFactory.class);
public static final String HBASE_FLUSH_THROUGHPUT_CONTROLLER_KEY =
"hbase.regionserver.flush.throughput.controller";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java
index c56b47409f..b3c7bf37a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java
@@ -17,12 +17,12 @@
*/
package org.apache.hadoop.hbase.regionserver.throttle;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
@@ -42,8 +42,8 @@ import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class PressureAwareCompactionThroughputController extends PressureAwareThroughputController {
- private final static Log LOG = LogFactory
- .getLog(PressureAwareCompactionThroughputController.class);
+ private final static Logger LOG = LoggerFactory
+ .getLogger(PressureAwareCompactionThroughputController.class);
public static final String HBASE_HSTORE_COMPACTION_MAX_THROUGHPUT_HIGHER_BOUND =
"hbase.hstore.compaction.throughput.higher.bound";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java
index bdfa99d11f..fac4e86e5d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java
@@ -17,12 +17,12 @@
*/
package org.apache.hadoop.hbase.regionserver.throttle;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
@@ -40,7 +40,8 @@ import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class PressureAwareFlushThroughputController extends PressureAwareThroughputController {
- private static final Log LOG = LogFactory.getLog(PressureAwareFlushThroughputController.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(PressureAwareFlushThroughputController.class);
public static final String HBASE_HSTORE_FLUSH_MAX_THROUGHPUT_UPPER_BOUND =
"hbase.hstore.flush.throughput.upper.bound";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java
index 78413361b4..ec90830c17 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.regionserver.throttle;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -33,7 +33,8 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public abstract class PressureAwareThroughputController extends Configured implements
ThroughputController, Stoppable {
- private static final Log LOG = LogFactory.getLog(PressureAwareThroughputController.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(PressureAwareThroughputController.class);
/**
* Stores the information of one controlled compaction.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
index 992903656b..baa7590361 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java
@@ -48,8 +48,6 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.lang3.mutable.MutableLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -62,6 +60,7 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.hadoop.hbase.util.Bytes;
@@ -83,7 +82,8 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -115,7 +115,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public abstract class AbstractFSWAL<W extends WriterBase> implements WAL {
- private static final Log LOG = LogFactory.getLog(AbstractFSWAL.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractFSWAL.class);
protected static final int DEFAULT_SLOW_SYNC_TIME_MS = 100; // in ms
@@ -1117,8 +1117,8 @@ public abstract class AbstractFSWAL implements WAL {
if (args[0].compareTo("--dump") == 0) {
WALPrettyPrinter.run(Arrays.copyOfRange(args, 1, args.length));
} else if (args[0].compareTo("--perf") == 0) {
- LOG.fatal("Please use the WALPerformanceEvaluation tool instead. i.e.:");
- LOG.fatal(
+ LOG.error(HBaseMarkers.FATAL, "Please use the WALPerformanceEvaluation tool instead. i.e.:");
+ LOG.error(HBaseMarkers.FATAL,
"\thbase org.apache.hadoop.hbase.wal.WALPerformanceEvaluation --iterations " + args[1]);
System.exit(-1);
} else if (args[0].compareTo("--split") == 0) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java
index 256ced64bd..befc5509fd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java
@@ -28,14 +28,14 @@ import java.util.concurrent.atomic.AtomicLong;
import javax.crypto.spec.SecretKeySpec;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Encryption;
@@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public abstract class AbstractProtobufLogWriter {
- private static final Log LOG = LogFactory.getLog(AbstractProtobufLogWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractProtobufLogWriter.class);
protected CompressionContext compressionContext;
protected Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
index f8355e0e87..e78b1db632 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
@@ -45,8 +45,6 @@ import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -64,7 +62,8 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop;
@@ -131,7 +130,7 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.SingleThreadEvent
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class AsyncFSWAL extends AbstractFSWAL<AsyncWriter> {
- private static final Log LOG = LogFactory.getLog(AsyncFSWAL.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncFSWAL.class);
private static final Comparator<SyncFuture> SEQ_COMPARATOR = (o1, o2) -> {
int c = Long.compare(o1.getTxid(), o2.getTxid());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java
index 454928bb84..aa585e338c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java
@@ -25,8 +25,6 @@ import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.function.Consumer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -38,7 +36,8 @@ import org.apache.hadoop.hbase.wal.AsyncFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
@@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer;
public class AsyncProtobufLogWriter extends AbstractProtobufLogWriter
implements AsyncFSWALProvider.AsyncWriter {
- private static final Log LOG = LogFactory.getLog(AsyncProtobufLogWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncProtobufLogWriter.class);
private final EventLoopGroup eventLoopGroup;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index fd9d6c122a..c0f454e0b9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -35,8 +35,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -58,7 +56,8 @@ import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
@@ -101,7 +100,7 @@ public class FSHLog extends AbstractFSWAL {
// syncs and appends have completed -- so the log roller can swap the WAL out under it.
//
// We use ring buffer sequence as txid of FSWALEntry and SyncFuture.
- private static final Log LOG = LogFactory.getLog(FSHLog.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSHLog.class);
/**
* The nexus at which all incoming handlers meet. Does appends and sync with an ordering. Appends
@@ -162,13 +161,13 @@ public class FSHLog extends AbstractFSWAL {
@Override
public void handleOnStartException(Throwable ex) {
- LOG.error(ex);
+ LOG.error(ex.toString(), ex);
throw new RuntimeException(ex);
}
@Override
public void handleOnShutdownException(Throwable ex) {
- LOG.error(ex);
+ LOG.error(ex.toString(), ex);
throw new RuntimeException(ex);
}
}
@@ -634,6 +633,7 @@ public class FSHLog extends AbstractFSWAL {
/**
* @return true if number of replicas for the WAL is lower than threshold
*/
+ @Override
protected boolean doCheckLogLowReplication() {
boolean logRollNeeded = false;
// if the number of replicas in HDFS has fallen below the configured
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
index ba9c0e6a69..b19f93b333 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
@@ -23,9 +23,9 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
@@ -37,7 +37,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Private
public class MetricsWAL implements WALActionsListener {
- private static final Log LOG = LogFactory.getLog(MetricsWAL.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsWAL.class);
private final MetricsWALSource source;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index c199484271..6017a182f2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -26,9 +26,9 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
@@ -61,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX,
HBaseInterfaceAudience.CONFIG})
public class ProtobufLogReader extends ReaderBase {
- private static final Log LOG = LogFactory.getLog(ProtobufLogReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProtobufLogReader.class);
// public for WALFactory until we move everything to o.a.h.h.wal
@InterfaceAudience.Private
public static final byte[] PB_WAL_MAGIC = Bytes.toBytes("PWAL");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
index 7a135c9fe2..aeb2c19c25 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
@@ -21,15 +21,13 @@ package org.apache.hadoop.hbase.regionserver.wal;
import java.io.IOException;
import java.io.OutputStream;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer;
import org.apache.hadoop.hbase.util.CommonFSUtils;
@@ -44,14 +42,14 @@ import org.apache.hadoop.hbase.wal.WAL.Entry;
public class ProtobufLogWriter extends AbstractProtobufLogWriter
implements FSHLogProvider.Writer {
- private static final Log LOG = LogFactory.getLog(ProtobufLogWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProtobufLogWriter.class);
protected FSDataOutputStream output;
@Override
public void append(Entry entry) throws IOException {
entry.setCompressionContext(compressionContext);
- ((WALKeyImpl)entry.getKey()).getBuilder(compressor).
+ entry.getKey().getBuilder(compressor).
setFollowingKvCount(entry.getEdit().size()).build().writeDelimitedTo(output);
for (Cell cell : entry.getEdit().getCells()) {
// cellEncoder must assume little about the stream, since we write PB and cells in turn.
@@ -68,7 +66,7 @@ public class ProtobufLogWriter extends AbstractProtobufLogWriter
this.output.close();
} catch (NullPointerException npe) {
// Can get a NPE coming up from down in DFSClient$DFSOutputStream#close
- LOG.warn(npe);
+ LOG.warn(npe.toString(), npe);
}
this.output = null;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java
index 9a6bfd3958..f0573587a6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.regionserver.wal;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -34,10 +32,12 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
public abstract class ReaderBase implements AbstractFSWALProvider.Reader {
- private static final Log LOG = LogFactory.getLog(ReaderBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReaderBase.class);
protected Configuration conf;
protected FileSystem fs;
protected Path path;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java
index 2765f94995..b1f17ad4ea 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java
@@ -24,9 +24,9 @@ import java.security.KeyException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.util.EncryptionTest;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class SecureProtobufLogReader extends ProtobufLogReader {
- private static final Log LOG = LogFactory.getLog(SecureProtobufLogReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SecureProtobufLogReader.class);
private Decryptor decryptor = null;
private static List<String> writerClsNames = new ArrayList<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java
index 61586548ab..f21c1f0631 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java
@@ -30,11 +30,11 @@ import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ImmutableByteArray;
@@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.util.ImmutableByteArray;
@InterfaceAudience.Private
class SequenceIdAccounting {
- private static final Log LOG = LogFactory.getLog(SequenceIdAccounting.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SequenceIdAccounting.class);
/**
* This lock ties all operations on {@link SequenceIdAccounting#flushingSequenceIds} and
* {@link #lowestUnflushedSequenceIds} Maps. {@link #lowestUnflushedSequenceIds} has the
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java
index 0edd5d4e19..7b6182e6b6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java
@@ -22,8 +22,6 @@ package org.apache.hadoop.hbase.regionserver.wal;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Implements the coprocessor environment and runtime support for coprocessors
@@ -46,7 +46,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class WALCoprocessorHost
extends CoprocessorHost {
- private static final Log LOG = LogFactory.getLog(WALCoprocessorHost.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALCoprocessorHost.class);
/**
* Encapsulation of the environment of each coprocessor
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java
index 518ee8fbe5..c3b67faf95 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java
@@ -22,15 +22,14 @@ package org.apache.hadoop.hbase.regionserver.wal;
import java.io.IOException;
import java.util.NavigableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -44,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe
*/
@InterfaceAudience.Private
public class WALUtil {
- private static final Log LOG = LogFactory.getLog(WALUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALUtil.class);
private WALUtil() {
// Shut down construction of this class.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java
index a7637b1462..c390d0967d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.replication;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AbstractService;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Abstract
public abstract class BaseReplicationEndpoint extends AbstractService
implements ReplicationEndpoint {
- private static final Log LOG = LogFactory.getLog(BaseReplicationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BaseReplicationEndpoint.class);
public static final String REPLICATION_WALENTRYFILTER_CONFIG_KEY
= "hbase.replication.source.custom.walentryfilters";
protected Context ctx;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
index 5f465ce6c3..d5506b17d8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
@@ -22,21 +22,20 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.wal.WALEdit;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
public class BulkLoadCellFilter {
- private static final Log LOG = LogFactory.getLog(BulkLoadCellFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BulkLoadCellFilter.class);
private final ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java
index 4985b82ae8..bd5c529092 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java
@@ -24,8 +24,6 @@ import java.util.Collections;
import java.util.List;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Abortable;
@@ -37,6 +35,8 @@ import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.AuthFailedException;
import org.apache.zookeeper.KeeperException.ConnectionLossException;
import org.apache.zookeeper.KeeperException.SessionExpiredException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A {@link BaseReplicationEndpoint} for replication endpoints whose
@@ -48,7 +48,7 @@ import org.apache.zookeeper.KeeperException.SessionExpiredException;
public abstract class HBaseReplicationEndpoint extends BaseReplicationEndpoint
implements Abortable {
- private static final Log LOG = LogFactory.getLog(HBaseReplicationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseReplicationEndpoint.class);
private ZKWatcher zkw = null; // FindBugs: MT_CORRECTNESS
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java
index 5068cce8ef..b540416fec 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java
@@ -22,12 +22,12 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL.Entry;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.wal.WAL.Entry;
@InterfaceAudience.Private
public class NamespaceTableCfWALEntryFilter implements WALEntryFilter, WALCellFilter {
- private static final Log LOG = LogFactory.getLog(NamespaceTableCfWALEntryFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NamespaceTableCfWALEntryFilter.class);
private final ReplicationPeer peer;
private BulkLoadCellFilter bulkLoadFilter = new BulkLoadCellFilter();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java
index 5972734f6d..4e9d67a036 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java
@@ -21,8 +21,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hbase.Abortable;
@@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.replication.ReplicationQueuesClient;
import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Implementation of a file cleaner that checks if a hfile is still scheduled for replication before
@@ -43,7 +43,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class ReplicationHFileCleaner extends BaseHFileCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(ReplicationHFileCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationHFileCleaner.class);
private ZKWatcher zkw;
private ReplicationQueuesClient rqc;
private boolean stopped = false;
@@ -192,9 +192,7 @@ public class ReplicationHFileCleaner extends BaseHFileCleanerDelegate {
@Override
public void abort(String why, Throwable e) {
LOG.warn("ReplicationHFileCleaner received abort, ignoring. Reason: " + why);
- if (LOG.isDebugEnabled()) {
- LOG.debug(e);
- }
+ LOG.debug(e.toString(), e);
}
@Override
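The guarded debug call above collapses into a single LOG.debug(e.toString(), e); with SLF4J the level check happens inside the call, so a guard is only worth keeping when building the message itself is costly. A small sketch under that assumption (class and helper method are hypothetical):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class ExampleCleaner {  // hypothetical stand-in for a cleaner delegate
  private static final Logger LOG = LoggerFactory.getLogger(ExampleCleaner.class);

  void abort(String why, Throwable e) {
    // Parameterized message: the string is only formatted if WARN is enabled.
    LOG.warn("Received abort, ignoring. Reason: {}", why);
    // Cheap arguments: no isDebugEnabled() guard needed.
    LOG.debug(e.toString(), e);
    // A guard still helps when composing the message is expensive.
    if (LOG.isDebugEnabled()) {
      LOG.debug("State dump: {}", buildExpensiveDump());
    }
  }

  private String buildExpensiveDump() {
    return "...";  // placeholder for costly work
  }
}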
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
index 57ed8427ac..773e10e0e3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
@@ -18,11 +18,10 @@
*/
package org.apache.hadoop.hbase.replication.master;
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-import org.apache.yetus.audience.InterfaceAudience;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Set;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hbase.Abortable;
@@ -31,15 +30,16 @@ import org.apache.hadoop.hbase.master.cleaner.BaseLogCleanerDelegate;
import org.apache.hadoop.hbase.replication.ReplicationFactory;
import org.apache.hadoop.hbase.replication.ReplicationQueuesClient;
import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.Set;
-
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.zookeeper.KeeperException;
/**
* Implementation of a log cleaner that checks if a log is still scheduled for
@@ -47,7 +47,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class ReplicationLogCleaner extends BaseLogCleanerDelegate {
- private static final Log LOG = LogFactory.getLog(ReplicationLogCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationLogCleaner.class);
private ZKWatcher zkw;
private ReplicationQueuesClient replicationQueues;
private boolean stopped = false;
@@ -140,9 +140,7 @@ public class ReplicationLogCleaner extends BaseLogCleanerDelegate {
@Override
public void abort(String why, Throwable e) {
LOG.warn("ReplicationLogCleaner received abort, ignoring. Reason: " + why);
- if (LOG.isDebugEnabled()) {
- LOG.debug(e);
- }
+ LOG.debug(e.toString(), e);
}
@Override
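Beyond the logger swap, this file's imports are regrouped. The ordering the touched files converge on (reassembled here from the hunks above as a reference sketch, not an authoritative checkstyle rule) is: java.* first, then Hadoop/HBase/ZooKeeper plus org.slf4j, with the relocated org.apache.hadoop.hbase.shaded.* packages last.

import java.io.IOException;
import java.util.Collections;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate;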
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java
index 5c8fba3970..ea5509f64e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.replication.master;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -38,7 +36,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
/**
@@ -49,7 +48,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
@InterfaceStability.Unstable
public class ReplicationPeerConfigUpgrader extends ReplicationStateZKBase {
- private static final Log LOG = LogFactory.getLog(ReplicationPeerConfigUpgrader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeerConfigUpgrader.class);
public ReplicationPeerConfigUpgrader(ZKWatcher zookeeper,
Configuration conf, Abortable abortable) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java
index 8cfc3ce9b7..b28c58fc28 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java
@@ -16,14 +16,14 @@ import java.net.URL;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This will load all the xml configuration files for the source cluster replication ID from
@@ -31,7 +31,9 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class DefaultSourceFSConfigurationProvider implements SourceFSConfigurationProvider {
- private static final Log LOG = LogFactory.getLog(DefaultSourceFSConfigurationProvider.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(DefaultSourceFSConfigurationProvider.class);
+
// Map containing all the source clusters configurations against their replication cluster id
private Map<String, Configuration> sourceClustersConfs = new HashMap<>();
private static final String XML = ".xml";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
index ff5e5c7e12..93b86494de 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
@@ -28,8 +28,6 @@ import java.util.Queue;
import java.util.Set;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
@@ -59,6 +57,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AtomicLongMap;
/**
@@ -71,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AtomicLo
public class DumpReplicationQueues extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(DumpReplicationQueues.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(DumpReplicationQueues.class.getName());
private List<String> deadRegionServers;
private List<String> deletedQueues;
@@ -417,7 +417,7 @@ public class DumpReplicationQueues extends Configured implements Tool {
public void abort(String why, Throwable e) {
LOG.warn("DumpReplicationQueue received abort, ignoring. Reason: " + why);
if (LOG.isDebugEnabled()) {
- LOG.debug(e);
+ LOG.debug(e.toString(), e);
}
}
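This tool keeps the name-based form LoggerFactory.getLogger(DumpReplicationQueues.class.getName()); a short sketch (hypothetical class) showing it resolves to the same logger name as the getLogger(Class) form used elsewhere in the patch:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerNameExample {  // hypothetical class for illustration
  public static void main(String[] args) {
    Logger byClass = LoggerFactory.getLogger(LoggerNameExample.class);
    Logger byName = LoggerFactory.getLogger(LoggerNameExample.class.getName());
    // Both loggers carry the fully qualified class name, so configuration
    // (levels, appenders) applies to them identically.
    System.out.println(byClass.getName().equals(byName.getName()));  // prints true
  }
}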
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
index c1ed64413d..16fd0c3d7f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java
@@ -40,8 +40,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Abortable;
@@ -50,6 +48,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -77,8 +77,8 @@ import org.apache.hadoop.ipc.RemoteException;
*/
@InterfaceAudience.Private
public class HBaseInterClusterReplicationEndpoint extends HBaseReplicationEndpoint {
-
- private static final Log LOG = LogFactory.getLog(HBaseInterClusterReplicationEndpoint.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(HBaseInterClusterReplicationEndpoint.class);
private static final long DEFAULT_MAX_TERMINATION_WAIT_MULTIPLIER = 2;
@@ -144,7 +144,7 @@ public class HBaseInterClusterReplicationEndpoint extends HBaseReplicationEndpoi
// Set the size limit for replication RPCs to 95% of the max request size.
// We could do with less slop if we have an accurate estimate of encoded size. Being
// conservative for now.
- this.replicationRpcLimit = (int)(0.95 * (double)conf.getLong(RpcServer.MAX_REQUEST_SIZE,
+ this.replicationRpcLimit = (int)(0.95 * conf.getLong(RpcServer.MAX_REQUEST_SIZE,
RpcServer.DEFAULT_MAX_REQUEST_SIZE));
this.dropOnDeletedTables =
this.conf.getBoolean(HConstants.REPLICATION_DROP_ON_DELETED_TABLE_KEY, false);
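The dropped (double) cast above is a no-op: multiplying a double literal by a long already promotes the long operand to double. A tiny self-contained check under an assumed request-size value:

public class RpcLimitExample {
  public static void main(String[] args) {
    long maxRequestSize = 256L * 1024 * 1024;  // hypothetical 256 MB limit
    int withCast = (int) (0.95 * (double) maxRequestSize);
    int withoutCast = (int) (0.95 * maxRequestSize);
    System.out.println(withCast == withoutCast);  // prints true
  }
}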
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
index eb29ac4dc9..a2cd03e7e8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java
@@ -31,8 +31,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
@@ -70,7 +70,7 @@ public class HFileReplicator {
"hbase.replication.bulkload.copy.hfiles.perthread";
public static final int REPLICATION_BULKLOAD_COPY_HFILES_PERTHREAD_DEFAULT = 10;
- private static final Log LOG = LogFactory.getLog(HFileReplicator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileReplicator.class);
private static final String UNDERSCORE = "_";
private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
index 09ddbd40bd..9ca1c84431 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.replication.regionserver;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.metrics.BaseSource;
@@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.REPLICATION)
public class MetricsSource implements BaseSource {
- private static final Log LOG = LogFactory.getLog(MetricsSource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetricsSource.class);
// tracks last shipped timestamp for each wal group
private Map<String, Long> lastTimeStamps = new HashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
index cabf85a7f8..bd191e3397 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java
@@ -23,8 +23,6 @@ import java.util.List;
import java.util.UUID;
import java.util.concurrent.PriorityBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -32,6 +30,8 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.replication.ReplicationPeers;
import org.apache.hadoop.hbase.replication.ReplicationQueues;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
@InterfaceAudience.Private
public class RecoveredReplicationSource extends ReplicationSource {
- private static final Log LOG = LogFactory.getLog(RecoveredReplicationSource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RecoveredReplicationSource.class);
private String actualPeerId;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java
index af84868d61..630b90b68e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java
@@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.replication.regionserver;
import java.io.IOException;
import java.util.concurrent.PriorityBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.hadoop.hbase.replication.ReplicationQueues;
import org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceWALReader.WALEntryBatch;
@@ -36,8 +36,9 @@ import org.apache.hadoop.hbase.util.Threads;
*/
@InterfaceAudience.Private
public class RecoveredReplicationSourceShipper extends ReplicationSourceShipper {
+ private static final Logger LOG =
+ LoggerFactory.getLogger(RecoveredReplicationSourceShipper.class);
- private static final Log LOG = LogFactory.getLog(RecoveredReplicationSourceShipper.class);
protected final RecoveredReplicationSource source;
private final ReplicationQueues replicationQueues;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java
index edd1b2ad68..0af3f5cacc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java
@@ -20,13 +20,13 @@ package org.apache.hadoop.hbase.replication.regionserver;
import java.util.concurrent.PriorityBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.replication.WALEntryFilter;
/**
@@ -35,7 +35,8 @@ import org.apache.hadoop.hbase.replication.WALEntryFilter;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class RecoveredReplicationSourceWALReader extends ReplicationSourceWALReader {
- private static final Log LOG = LogFactory.getLog(RecoveredReplicationSourceWALReader.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(RecoveredReplicationSourceWALReader.class);
public RecoveredReplicationSourceWALReader(FileSystem fs, Configuration conf,
PriorityBlockingQueue<Path> logQueue, long startPosition, WALEntryFilter filter,
@@ -43,6 +44,7 @@ public class RecoveredReplicationSourceWALReader extends ReplicationSourceWALRea
super(fs, conf, logQueue, startPosition, filter, source);
}
+ @Override
protected void handleEmptyWALEntryBatch(WALEntryBatch batch, Path currentPath)
throws InterruptedException {
LOG.trace("Didn't read any new entries from WAL");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
index 3d39146cac..b9f2d0df6b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
@@ -32,8 +32,6 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellScanner;
@@ -70,7 +68,8 @@ import org.apache.hadoop.hbase.wal.WALSplitter.RegionEntryBuffer;
import org.apache.hadoop.hbase.wal.WALSplitter.SinkWriter;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.Cache;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
@@ -84,7 +83,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWA
@InterfaceAudience.Private
public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint {
- private static final Log LOG = LogFactory.getLog(RegionReplicaReplicationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionReplicaReplicationEndpoint.class);
// Can be configured differently than hbase.client.retries.number
private static String CLIENT_RETRIES_NUMBER
@@ -161,8 +160,7 @@ public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint {
try {
outputSink.finishWritingAndClose();
} catch (IOException ex) {
- LOG.warn("Got exception while trying to close OutputSink");
- LOG.warn(ex);
+ LOG.warn("Got exception while trying to close OutputSink", ex);
}
}
if (this.pool != null) {
@@ -583,6 +581,7 @@ public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint {
this.initialEncodedRegionName = regionInfo.getEncodedNameAsBytes();
}
+ @Override
public ReplicateWALEntryResponse call(HBaseRpcController controller) throws Exception {
// Check whether we should still replay this entry. If the regions are changed, or the
// entry is not coming form the primary region, filter it out because we do not need it.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
index 2a2df60303..d8212e95ae 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
@@ -29,8 +29,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
@@ -62,7 +60,8 @@ import org.apache.hadoop.hbase.replication.master.ReplicationHFileCleaner;
import org.apache.hadoop.hbase.replication.master.ReplicationLogCleaner;
import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
@@ -71,8 +70,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
@InterfaceAudience.Private
public class Replication implements
ReplicationSourceService, ReplicationSinkService, WALActionsListener {
- private static final Log LOG =
- LogFactory.getLog(Replication.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(Replication.class);
private boolean replicationForBulkLoadData;
private ReplicationSourceManager replicationManager;
private ReplicationQueues replicationQueues;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java
index e72f6e2a09..ec478d5e09 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java
@@ -23,8 +23,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
@@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -48,7 +48,7 @@ import javax.validation.constraints.Null;
@CoreCoprocessor
@InterfaceAudience.Private
public class ReplicationObserver implements RegionCoprocessor, RegionObserver {
- private static final Log LOG = LogFactory.getLog(ReplicationObserver.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationObserver.class);
@Override
public Optional<RegionObserver> getRegionObserver() {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
index 2194796659..57e185a7a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
@@ -31,8 +31,6 @@ import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -44,6 +42,8 @@ import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
@@ -78,7 +78,7 @@ import org.apache.hadoop.hbase.util.Pair;
@InterfaceAudience.Private
public class ReplicationSink {
- private static final Log LOG = LogFactory.getLog(ReplicationSink.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSink.class);
private final Configuration conf;
// Volatile because of note in here -- look for double-checked locking:
// http://www.oracle.com/technetwork/articles/javase/bloch-effective-08-qa-140880.html
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java
index eb882f3a49..58685c2495 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java
@@ -17,25 +17,24 @@
*/
package org.apache.hadoop.hbase.replication.regionserver;
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
import org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
/**
* Maintains a collection of peers to replicate to, and randomly selects a
@@ -44,7 +43,7 @@ import org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint;
*/
public class ReplicationSinkManager {
- private static final Log LOG = LogFactory.getLog(ReplicationSinkManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSinkManager.class);
/**
* Default maximum number of times a replication sink can be reported as bad before
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
index ea6c6d44af..f4f35ae0c5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
@@ -33,8 +33,6 @@ import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -45,6 +43,8 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RSRpcServices;
import org.apache.hadoop.hbase.replication.ChainWALEntryFilter;
import org.apache.hadoop.hbase.replication.ClusterMarkingEntryFilter;
@@ -78,7 +78,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@InterfaceAudience.Private
public class ReplicationSource extends Thread implements ReplicationSourceInterface {
- private static final Log LOG = LogFactory.getLog(ReplicationSource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSource.class);
// Queues of logs to process, entry in format of walGroupId->queue,
// each presents a queue for one wal group
private Map<String, PriorityBlockingQueue<Path>> queues = new HashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java
index e97da24a56..865a202870 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java
@@ -18,10 +18,10 @@
*/
package org.apache.hadoop.hbase.replication.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
/**
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
@InterfaceAudience.Private
public class ReplicationSourceFactory {
- private static final Log LOG = LogFactory.getLog(ReplicationSourceFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSourceFactory.class);
static ReplicationSourceInterface create(Configuration conf, String peerId) {
ReplicationQueueInfo replicationQueueInfo = new ReplicationQueueInfo(peerId);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index c518ece18f..07c53e1c9e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -40,8 +40,7 @@ import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -67,7 +66,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -89,8 +89,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
*/
@InterfaceAudience.Private
public class ReplicationSourceManager implements ReplicationListener {
- private static final Log LOG =
- LogFactory.getLog(ReplicationSourceManager.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ReplicationSourceManager.class);
// List of all the sources that read this RS's logs
private final List<ReplicationSourceInterface> sources;
// List of all the sources we got from died RSs
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java
index 9dfe686d17..1e1dcc8c15 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java
@@ -24,14 +24,14 @@ import java.util.Map;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceWALReader.WALEntryBatch;
@@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
*/
@InterfaceAudience.Private
public class ReplicationSourceShipper extends Thread {
- private static final Log LOG = LogFactory.getLog(ReplicationSourceShipper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSourceShipper.class);
// Hold the state of a replication worker thread
public enum WorkerState {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java
index bbcaaa4d73..1ec797fb14 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java
@@ -29,8 +29,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -45,7 +43,8 @@ import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescript
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ReplicationSourceWALReader extends Thread {
- private static final Log LOG = LogFactory.getLog(ReplicationSourceWALReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSourceWALReader.class);
private final PriorityBlockingQueue<Path> logQueue;
private final FileSystem fs;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
index db3b1fc037..21b8ac5c8c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.replication.regionserver;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -39,6 +37,8 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* In a scenario of Replication based Disaster/Recovery, when hbase
@@ -51,7 +51,7 @@ import org.apache.hadoop.util.ToolRunner;
public class ReplicationSyncUp extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(ReplicationSyncUp.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(ReplicationSyncUp.class.getName());
private static Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
index 6277d24c5b..7c83c0c1fa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
@@ -25,8 +25,6 @@ import java.util.NoSuchElementException;
import java.util.OptionalLong;
import java.util.concurrent.PriorityBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.util.FSUtils;
@@ -53,7 +53,7 @@ import org.apache.hadoop.ipc.RemoteException;
@InterfaceAudience.Private
@InterfaceStability.Evolving
class WALEntryStream implements Closeable {
- private static final Log LOG = LogFactory.getLog(WALEntryStream.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALEntryStream.class);
private Reader reader;
private Path currentPath;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
index 7059bd8dc4..b3924350b4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
@@ -34,9 +34,9 @@ import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
@@ -49,7 +49,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
@InterfaceAudience.Private
public class HBaseSaslRpcServer {
- private static final Log LOG = LogFactory.getLog(HBaseSaslRpcServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseSaslRpcServer.class);
private final SaslServer saslServer;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index 0b40b7167f..7b0e4cdf0d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -32,8 +32,6 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
@@ -75,6 +73,8 @@ import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
import org.apache.hadoop.io.WritableUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Maintains lists of permission grants to users and groups to allow for
@@ -119,7 +119,7 @@ public class AccessControlLists {
* _acl_ table info: column keys */
public static final char ACL_KEY_DELIMITER = ',';
- private static final Log LOG = LogFactory.getLog(AccessControlLists.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AccessControlLists.class);
/**
* Stores a new user permission grant in the access control lists table.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 0f9d8a5a8f..0bb61c921f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -39,8 +39,6 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
@@ -139,6 +137,8 @@ import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides basic authorization checks for data access and administrative
@@ -180,10 +180,10 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor,
MasterObserver, RegionObserver, RegionServerObserver, EndpointObserver, BulkLoadObserver {
// TODO: encapsulate observer functions into separate class/sub-class.
- private static final Log LOG = LogFactory.getLog(AccessController.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AccessController.class);
- private static final Log AUDITLOG =
- LogFactory.getLog("SecurityLogger."+AccessController.class.getName());
+ private static final Logger AUDITLOG =
+ LoggerFactory.getLogger("SecurityLogger."+AccessController.class.getName());
private static final String CHECK_COVERING_PERM = "check_covering_perm";
private static final String TAG_CHECK_PASSED = "tag_check_passed";
private static final byte[] TRUE = Bytes.toBytes(true);
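Editorial note: AccessController (and later VisibilityController) also keeps a second, named audit logger whose name is prefixed with "SecurityLogger."; with SLF4J that is simply LoggerFactory.getLogger(String), so logging configuration that routes by that logger name can keep targeting it. A hedged sketch; AuditDemo is a hypothetical class:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class AuditDemo {
  private static final Logger AUDITLOG =
      LoggerFactory.getLogger("SecurityLogger." + AuditDemo.class.getName());

  void audit(String user, String action) {
    // Messages go to the named audit logger, not the class logger.
    AUDITLOG.trace("Audit: user=" + user + ", action=" + action);
  }
}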
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
index a1179b1dc3..44a4f57204 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
@@ -24,8 +24,6 @@ import java.util.Optional;
import java.util.regex.Matcher;
import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -39,6 +37,8 @@ import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Master observer for restricting coprocessor assignments.
@@ -49,8 +49,8 @@ public class CoprocessorWhitelistMasterObserver implements MasterCoprocessor, Ma
public static final String CP_COPROCESSOR_WHITELIST_PATHS_KEY =
"hbase.coprocessor.region.whitelist.paths";
- private static final Log LOG = LogFactory
- .getLog(CoprocessorWhitelistMasterObserver.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(CoprocessorWhitelistMasterObserver.class);
@Override
public Optional<MasterObserver> getMasterObserver() {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
index 2ba4ac5549..53b10d0a8d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
@@ -33,8 +33,6 @@ import java.util.Map;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
@@ -42,11 +40,14 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Performs authorization checks for a given user's assigned permissions
@@ -97,7 +98,7 @@ public class TableAuthManager implements Closeable {
}
}
- private static final Log LOG = LogFactory.getLog(TableAuthManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TableAuthManager.class);
/** Cache of global permissions */
private volatile PermissionCache<Permission> globalCache;
@@ -769,7 +770,7 @@ public class TableAuthManager implements Closeable {
if (refCount.get(instance) == null || refCount.get(instance) < 1) {
String msg = "Something wrong with the TableAuthManager reference counting: " + instance
+ " whose count is " + refCount.get(instance);
- LOG.fatal(msg);
+ LOG.error(HBaseMarkers.FATAL, msg);
instance.close();
managerMap.remove(instance.getZKPermissionWatcher().getWatcher());
instance.getZKPermissionWatcher().getWatcher().abort(msg, null);
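Editorial note: SLF4J has no FATAL level, so calls that used LOG.fatal(...) are rewritten as ERROR carrying the HBaseMarkers.FATAL marker, as in the TableAuthManager hunk above. A minimal sketch of that pattern (FatalDemo is hypothetical; the marker class is the one this patch imports):

import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class FatalDemo {
  private static final Logger LOG = LoggerFactory.getLogger(FatalDemo.class);

  void fail(String msg, Exception cause) {
    // Before: LOG.fatal(msg, cause);
    LOG.error(HBaseMarkers.FATAL, msg, cause);
  }
}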
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
index d45b5b5730..2437657fbc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.security.access;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DaemonThreadFactory;
import org.apache.hadoop.hbase.TableName;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
@@ -53,7 +53,7 @@ import java.util.concurrent.RejectedExecutionException;
*/
@InterfaceAudience.Private
public class ZKPermissionWatcher extends ZKListener implements Closeable {
- private static final Log LOG = LogFactory.getLog(ZKPermissionWatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKPermissionWatcher.class);
// parent node for permissions lists
static final String ACL_NODE = "acl";
private final TableAuthManager authManager;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
index aa6b1e94b7..de8ea5d3ab 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
@@ -25,8 +25,6 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
@@ -40,6 +38,8 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.Token;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Manages an internal list of secret keys used to sign new authentication
@@ -60,7 +60,7 @@ public class AuthenticationTokenSecretManager
static final String NAME_PREFIX = "SecretManager-";
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
AuthenticationTokenSecretManager.class);
private long lastKeyUpdate;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
index 3bf4df101a..389bcc6be9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.security.token;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.security.token.Token;
@@ -36,7 +36,7 @@ import org.apache.hadoop.security.token.Token;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class FsDelegationToken {
- private static final Log LOG = LogFactory.getLog(FsDelegationToken.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FsDelegationToken.class);
private final UserProvider userProvider;
private final String renewer;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
index e355752013..b137aaa30e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
@@ -24,8 +24,6 @@ import com.google.protobuf.Service;
import java.io.IOException;
import java.util.Collections;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
@@ -43,6 +41,8 @@ import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.Token;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Provides a service for obtaining authentication tokens via the
@@ -53,7 +53,7 @@ import org.apache.yetus.audience.InterfaceAudience;
public class TokenProvider implements AuthenticationProtos.AuthenticationService.Interface,
RegionCoprocessor {
- private static final Log LOG = LogFactory.getLog(TokenProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TokenProvider.class);
private AuthenticationTokenSecretManager secretManager;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
index 3347e1caa6..5461760137 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
@@ -24,8 +24,7 @@ import java.security.PrivilegedExceptionAction;
import com.google.protobuf.ByteString;
import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
@@ -43,6 +42,8 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.security.token.Token;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility methods for obtaining authentication tokens.
@@ -50,7 +51,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Public
public class TokenUtil {
// This class is referenced indirectly by User out in common; instances are created by reflection
- private static final Log LOG = LogFactory.getLog(TokenUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TokenUtil.class);
/**
* Obtain and return an authentication token for the current user.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java
index 96502fd82a..d31b8a9652 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java
@@ -18,13 +18,12 @@
package org.apache.hadoop.hbase.security.token;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
@@ -33,6 +32,8 @@ import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Synchronizes token encryption keys across cluster nodes.
@@ -41,7 +42,7 @@ import org.apache.zookeeper.KeeperException;
public class ZKSecretWatcher extends ZKListener {
private static final String DEFAULT_ROOT_NODE = "tokenauth";
private static final String DEFAULT_KEYS_PARENT = "keys";
- private static final Log LOG = LogFactory.getLog(ZKSecretWatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKSecretWatcher.class);
private AuthenticationTokenSecretManager secretManager;
private String baseKeyZNode;
@@ -77,7 +78,7 @@ public class ZKSecretWatcher extends ZKListener {
ZKUtil.getChildDataAndWatchForNewChildren(watcher, keysParentZNode);
refreshNodes(nodes);
} catch (KeeperException ke) {
- LOG.fatal("Error reading data from zookeeper", ke);
+ LOG.error(HBaseMarkers.FATAL, "Error reading data from zookeeper", ke);
watcher.abort("Error reading new key znode "+path, ke);
}
}
@@ -110,10 +111,10 @@ public class ZKSecretWatcher extends ZKListener {
new AuthenticationKey());
secretManager.addKey(key);
} catch (KeeperException ke) {
- LOG.fatal("Error reading data from zookeeper", ke);
+ LOG.error(HBaseMarkers.FATAL, "Error reading data from zookeeper", ke);
watcher.abort("Error reading updated key znode "+path, ke);
} catch (IOException ioe) {
- LOG.fatal("Error reading key writables", ioe);
+ LOG.error(HBaseMarkers.FATAL, "Error reading key writables", ioe);
watcher.abort("Error reading key writables from znode "+path, ioe);
}
}
@@ -128,7 +129,7 @@ public class ZKSecretWatcher extends ZKListener {
ZKUtil.getChildDataAndWatchForNewChildren(watcher, keysParentZNode);
refreshNodes(nodes);
} catch (KeeperException ke) {
- LOG.fatal("Error reading data from zookeeper", ke);
+ LOG.error(HBaseMarkers.FATAL, "Error reading data from zookeeper", ke);
watcher.abort("Error reading changed keys from zookeeper", ke);
}
}
@@ -152,8 +153,8 @@ public class ZKSecretWatcher extends ZKListener {
data, new AuthenticationKey());
secretManager.addKey(key);
} catch (IOException ioe) {
- LOG.fatal("Failed reading new secret key for id '" + keyId +
- "' from zk", ioe);
+ LOG.error(HBaseMarkers.FATAL, "Failed reading new secret key for id '" +
+ keyId + "' from zk", ioe);
watcher.abort("Error deserializing key from znode "+path, ioe);
}
}
@@ -170,8 +171,8 @@ public class ZKSecretWatcher extends ZKListener {
} catch (KeeperException.NoNodeException nne) {
LOG.error("Non-existent znode "+keyZNode+" for key "+key.getKeyId(), nne);
} catch (KeeperException ke) {
- LOG.fatal("Failed removing znode "+keyZNode+" for key "+key.getKeyId(),
- ke);
+ LOG.error(HBaseMarkers.FATAL, "Failed removing znode "+keyZNode+" for key "+
+ key.getKeyId(), ke);
watcher.abort("Unhandled zookeeper error removing znode "+keyZNode+
" for key "+key.getKeyId(), ke);
}
@@ -184,7 +185,7 @@ public class ZKSecretWatcher extends ZKListener {
// TODO: is there any point in retrying beyond what ZK client does?
ZKUtil.createSetData(watcher, keyZNode, keyData);
} catch (KeeperException ke) {
- LOG.fatal("Unable to synchronize master key "+key.getKeyId()+
+ LOG.error(HBaseMarkers.FATAL, "Unable to synchronize master key "+key.getKeyId()+
" to znode "+keyZNode, ke);
watcher.abort("Unable to synchronize secret key "+
key.getKeyId()+" in zookeeper", ke);
@@ -205,7 +206,7 @@ public class ZKSecretWatcher extends ZKListener {
ZKUtil.createSetData(watcher, keyZNode, keyData);
}
} catch (KeeperException ke) {
- LOG.fatal("Unable to update master key "+key.getKeyId()+
+ LOG.error(HBaseMarkers.FATAL, "Unable to update master key "+key.getKeyId()+
" in znode "+keyZNode);
watcher.abort("Unable to synchronize secret key "+
key.getKeyId()+" in zookeeper", ke);
@@ -224,7 +225,7 @@ public class ZKSecretWatcher extends ZKListener {
ZKUtil.getChildDataAndWatchForNewChildren(watcher, keysParentZNode);
refreshNodes(nodes);
} catch (KeeperException ke) {
- LOG.fatal("Error reading data from zookeeper", ke);
+ LOG.error(HBaseMarkers.FATAL, "Error reading data from zookeeper", ke);
watcher.abort("Error reading changed keys from zookeeper", ke);
}
}
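Editorial note: the converted call sites in ZKSecretWatcher keep their existing string concatenation (e.g. "Error reading new key znode " + path). SLF4J also accepts {} placeholders, which defer building the message until the level is actually enabled; this patch does not rewrite messages that way, so the snippet below only illustrates the equivalent form under that assumption:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class PlaceholderDemo {
  private static final Logger LOG = LoggerFactory.getLogger(PlaceholderDemo.class);

  void report(String path, Exception ke) {
    // As in the converted call sites: message built with concatenation.
    LOG.error("Error reading new key znode " + path, ke);
    // Equivalent placeholder form; a trailing Throwable still prints its stack trace.
    LOG.error("Error reading new key znode {}", path, ke);
  }
}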
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
index 0c6c914baa..fa7c7a74e0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
@@ -40,8 +40,6 @@ import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.AuthUtil;
@@ -74,11 +72,13 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService {
-
- private static final Log LOG = LogFactory.getLog(DefaultVisibilityLabelServiceImpl.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(DefaultVisibilityLabelServiceImpl.class);
// "system" label is having an ordinal value 1.
private static final int SYSTEM_LABEL_ORDINAL = 1;
@@ -507,7 +507,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
authLabels = (authLabels == null) ? new ArrayList<>() : authLabels;
authorizations = new Authorizations(authLabels);
} catch (Throwable t) {
- LOG.error(t);
+ LOG.error(t.toString(), t);
throw new IOException(t);
}
}
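Editorial note: commons-logging allowed a bare Throwable as the only argument (LOG.error(t)); SLF4J requires a String message first, which is why the hunk above becomes LOG.error(t.toString(), t), keeping both a message and the stack trace. A minimal sketch (ThrowableLogDemo is hypothetical):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ThrowableLogDemo {
  private static final Logger LOG = LoggerFactory.getLogger(ThrowableLogDemo.class);

  void handle(Throwable t) {
    // Before: LOG.error(t);
    LOG.error(t.toString(), t);
  }
}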
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java
index 0b7214fb30..77bc2057cd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java
@@ -22,9 +22,9 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.User;
@@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.security.User;
*/
@InterfaceAudience.Private
public class DefinedSetFilterScanLabelGenerator implements ScanLabelGenerator {
-
- private static final Log LOG = LogFactory.getLog(DefinedSetFilterScanLabelGenerator.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(DefinedSetFilterScanLabelGenerator.class);
private Configuration conf;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java
index a2a9e04115..e2bc16b5f0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java
@@ -22,9 +22,9 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.User;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.security.User;
@InterfaceAudience.Private
public class EnforcingScanLabelGenerator implements ScanLabelGenerator {
- private static final Log LOG = LogFactory.getLog(EnforcingScanLabelGenerator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EnforcingScanLabelGenerator.class);
private Configuration conf;
private VisibilityLabelsCache labelsCache;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java
index cd6ef86c5c..1c77a4d008 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java
@@ -22,9 +22,9 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.User;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.security.User;
@InterfaceAudience.Private
public class FeedUserAuthScanLabelGenerator implements ScanLabelGenerator {
- private static final Log LOG = LogFactory.getLog(FeedUserAuthScanLabelGenerator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FeedUserAuthScanLabelGenerator.class);
private Configuration conf;
private VisibilityLabelsCache labelsCache;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
index c6f81c4c65..b90f10484e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
@@ -38,8 +38,6 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
@@ -114,6 +112,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Coprocessor that has both the MasterObserver and RegionObserver implemented that supports in
@@ -125,8 +125,8 @@ import org.apache.yetus.audience.InterfaceAudience;
public class VisibilityController implements MasterCoprocessor, RegionCoprocessor,
VisibilityLabelsService.Interface, MasterObserver, RegionObserver {
- private static final Log LOG = LogFactory.getLog(VisibilityController.class);
- private static final Log AUDITLOG = LogFactory.getLog("SecurityLogger."
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityController.class);
+ private static final Logger AUDITLOG = LoggerFactory.getLogger("SecurityLogger."
+ VisibilityController.class.getName());
// flags if we are running on a region of the 'labels' table
private boolean labelsRegion = false;
@@ -772,7 +772,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso
LOG.error("User is not having required permissions to add labels", e);
setExceptionResults(visLabels.size(), e, response);
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
setExceptionResults(visLabels.size(), e, response);
}
}
@@ -827,7 +827,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso
LOG.error("User is not having required permissions to set authorization", e);
setExceptionResults(auths.size(), e, response);
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
setExceptionResults(auths.size(), e, response);
}
}
@@ -951,7 +951,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso
LOG.error("User is not having required permissions to clear authorization", e);
setExceptionResults(auths.size(), e, response);
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
setExceptionResults(auths.size(), e, response);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java
index 16eff84310..74531b92ce 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java
@@ -19,9 +19,9 @@ package org.apache.hadoop.hbase.security.visibility;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
@@ -31,7 +31,7 @@ import org.apache.hadoop.util.ReflectionUtils;
@InterfaceAudience.Private
public class VisibilityLabelServiceManager {
- private static final Log LOG = LogFactory.getLog(VisibilityLabelServiceManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityLabelServiceManager.class);
public static final String VISIBILITY_LABEL_SERVICE_CLASS =
"hbase.regionserver.visibility.label.service.class";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
index 85bc0d5173..438b616947 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
@@ -27,8 +27,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.yetus.audience.InterfaceAudience;
@@ -39,6 +37,8 @@ import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.Visibil
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Maintains the cache for visibility labels and also uses the zookeeper to update the labels in the
@@ -48,7 +48,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Private
public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
- private static final Log LOG = LogFactory.getLog(VisibilityLabelsCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityLabelsCache.class);
private static final List<String> EMPTY_LIST = Collections.emptyList();
private static final Set<Integer> EMPTY_SET = Collections.emptySet();
private static VisibilityLabelsCache instance;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
index dc467d6f91..f6ed72ff74 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
@@ -30,21 +30,21 @@ import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.regionserver.querymatcher.NewVersionBehaviorTracker;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Similar to MvccSensitiveTracker but tracks the visibility expression also before
* deciding if a Cell can be considered deleted
*/
public class VisibilityNewVersionBehaivorTracker extends NewVersionBehaviorTracker {
-
- private static final Log LOG = LogFactory.getLog(VisibilityNewVersionBehaivorTracker.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(VisibilityNewVersionBehaivorTracker.class);
public VisibilityNewVersionBehaivorTracker(NavigableSet columns,
CellComparator cellComparator, int minVersion, int maxVersion, int resultMaxVersions,
@@ -85,6 +85,7 @@ public class VisibilityNewVersionBehaivorTracker extends NewVersionBehaviorTrack
mvccCountingMap.put(Long.MAX_VALUE, new TreeSet());
}
+ @Override
protected VisibilityDeleteVersionsNode getDeepCopy() {
VisibilityDeleteVersionsNode node = new VisibilityDeleteVersionsNode(ts, mvcc, tagInfo);
for (Map.Entry> e : deletesMap.entrySet()) {
@@ -96,6 +97,7 @@ public class VisibilityNewVersionBehaivorTracker extends NewVersionBehaviorTrack
return node;
}
+ @Override
public void addVersionDelete(Cell cell) {
SortedMap set = deletesMap.get(cell.getTimestamp());
if (set == null) {
@@ -196,6 +198,7 @@ public class VisibilityNewVersionBehaivorTracker extends NewVersionBehaviorTrack
return DeleteResult.NOT_DELETED;
}
+ @Override
protected void resetInternal() {
delFamMap.put(Long.MAX_VALUE,
new VisibilityDeleteVersionsNode(Long.MIN_VALUE, Long.MAX_VALUE, new TagInfo()));
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java
index 14507a458f..cd495ce442 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java
@@ -24,8 +24,6 @@ import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.PrivateCellUtil;
@@ -36,13 +34,14 @@ import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.WALEntryFilter;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALEdit;
-import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class VisibilityReplicationEndpoint implements ReplicationEndpoint {
- private static final Log LOG = LogFactory.getLog(VisibilityReplicationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityReplicationEndpoint.class);
private final ReplicationEndpoint delegator;
private final VisibilityLabelService visibilityLabelsService;
@@ -111,7 +110,7 @@ public class VisibilityReplicationEndpoint implements ReplicationEndpoint {
newEdit.add(cell);
}
}
- newEntries.add(new Entry(((WALKeyImpl)entry.getKey()), newEdit));
+ newEntries.add(new Entry((entry.getKey()), newEdit));
}
replicateContext.setEntries(newEntries);
return delegator.replicate(replicateContext);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
index da0938b22d..6b9ac7449a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
@@ -23,9 +23,9 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.querymatcher.ScanDeleteTracker;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.Triple;
@InterfaceAudience.Private
public class VisibilityScanDeleteTracker extends ScanDeleteTracker {
- private static final Log LOG = LogFactory.getLog(VisibilityScanDeleteTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityScanDeleteTracker.class);
/**
* This tag is used for the DELETE cell which has no visibility label.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
index 3db8d0ebf1..c177c2b09e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
@@ -35,8 +35,6 @@ import java.util.Optional;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
@@ -65,6 +63,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility method to support visibility
@@ -72,7 +72,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class VisibilityUtils {
- private static final Log LOG = LogFactory.getLog(VisibilityUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(VisibilityUtils.class);
public static final String VISIBILITY_LABEL_GENERATOR_CLASS =
"hbase.regionserver.scan.visibility.label.generator.class";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java
index 5cc244cd69..d428ff4291 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.security.visibility;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
@@ -28,6 +26,8 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A zk watcher that watches the labels table znode. This would create a znode
@@ -36,7 +36,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Private
public class ZKVisibilityLabelWatcher extends ZKListener {
- private static final Log LOG = LogFactory.getLog(ZKVisibilityLabelWatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKVisibilityLabelWatcher.class);
private static final String VISIBILITY_LABEL_ZK_PATH = "zookeeper.znode.visibility.label.parent";
private static final String DEFAULT_VISIBILITY_LABEL_NODE = "visibility/labels";
private static final String VISIBILITY_USER_AUTHS_ZK_PATH =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index e08d547d6b..99690de203 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -34,8 +34,6 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -66,7 +64,8 @@ import org.apache.hadoop.hbase.util.ModifyRegionUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.IOUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
@@ -115,7 +114,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Private
public class RestoreSnapshotHelper {
- private static final Log LOG = LogFactory.getLog(RestoreSnapshotHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RestoreSnapshotHelper.class);
private final Map<byte[], byte[]> regionsMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
index 61a4a85ed0..d5cab63300 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
@@ -21,9 +21,6 @@ import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -32,18 +29,22 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
-import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.AccessControlLists;
import org.apache.hadoop.hbase.security.access.ShadedAccessControlUtil;
import org.apache.hadoop.hbase.security.access.TablePermission;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
/**
* Utility class to help manage {@link SnapshotDescription SnapshotDesriptions}.
@@ -97,7 +98,7 @@ public final class SnapshotDescriptionUtils {
}
}
- private static final Log LOG = LogFactory.getLog(SnapshotDescriptionUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotDescriptionUtils.class);
/**
* Version of the fs layout for a snapshot. Future snapshots may have different file layouts,
* which we may need to read in differently.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index c76155c6de..7d7e526b6d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -34,8 +34,6 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -49,7 +47,8 @@ import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Public
public final class SnapshotInfo extends AbstractHBaseTool {
- private static final Log LOG = LogFactory.getLog(SnapshotInfo.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotInfo.class);
static final class Options {
static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to examine.");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index 1a7c7f017a..b334585454 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -28,8 +28,6 @@ import java.util.Map;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
@@ -69,7 +68,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Private
public final class SnapshotManifest {
- private static final Log LOG = LogFactory.getLog(SnapshotManifest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotManifest.class);
public static final String SNAPSHOT_MANIFEST_SIZE_LIMIT_CONF_KEY = "snapshot.manifest.size.limit";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
index 61cbbd1172..7dfeab39a4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
@@ -28,8 +28,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorCompletionService;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
@@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Private
public final class SnapshotManifestV1 {
- private static final Log LOG = LogFactory.getLog(SnapshotManifestV1.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotManifestV1.class);
public static final int DESCRIPTOR_VERSION = 0;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
index 561eb77638..5b7152aad5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
@@ -27,8 +27,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorCompletionService;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
@@ -58,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Private
public final class SnapshotManifestV2 {
- private static final Log LOG = LogFactory.getLog(SnapshotManifestV2.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotManifestV2.class);
public static final int DESCRIPTOR_VERSION = 2;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
index 0cca62fa33..b157d01e19 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
@@ -29,8 +29,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
*/
@InterfaceAudience.Private
public final class SnapshotReferenceUtil {
- private static final Log LOG = LogFactory.getLog(SnapshotReferenceUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotReferenceUtil.class);
public interface StoreFileVisitor {
void storeFile(final RegionInfo regionInfo, final String familyName,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
index e942a020e0..969a7579ba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
@@ -49,8 +49,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.time.StopWatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.ChoreService;
@@ -98,7 +96,8 @@ import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.client.ConnectStringParser;
import org.apache.zookeeper.data.Stat;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -540,10 +539,10 @@ public final class Canary implements Tool {
LOG.debug("The targeted table was disabled. Assuming success.");
} catch (DoNotRetryIOException dnrioe) {
sink.publishReadFailure(tableName.getNameAsString(), serverName);
- LOG.error(dnrioe);
+ LOG.error(dnrioe.toString(), dnrioe);
} catch (IOException e) {
sink.publishReadFailure(tableName.getNameAsString(), serverName);
- LOG.error(e);
+ LOG.error(e.toString(), e);
} finally {
if (table != null) {
try {
@@ -571,7 +570,7 @@ public final class Canary implements Tool {
private static final long DEFAULT_TIMEOUT = 600000; // 10 mins
private static final int MAX_THREADS_NUM = 16; // #threads to contact regions
- private static final Log LOG = LogFactory.getLog(Canary.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Canary.class);
public static final TableName DEFAULT_WRITE_TABLE_NAME = TableName.valueOf(
NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "canary");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
index c457e224da..b5eea9cd0c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
@@ -50,8 +50,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import org.apache.commons.lang3.mutable.MutableInt;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
@@ -63,6 +61,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ClientServiceCallable;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
@@ -109,7 +109,7 @@ import org.apache.hadoop.util.ToolRunner;
@InterfaceAudience.Public
public class LoadIncrementalHFiles extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(LoadIncrementalHFiles.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LoadIncrementalHFiles.class);
public static final String NAME = "completebulkload";
static final String RETRY_ON_IO_EXCEPTION = "hbase.bulkload.retries.retryOnIOException";
@@ -328,7 +328,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
if (queue.isEmpty()) {
LOG.warn(
"Bulk load operation did not find any files to load in " + "directory " + hfofDir != null
- ? hfofDir.toUri()
+ ? hfofDir.toUri().toString()
: "" + ". Does it contain files in " +
"subdirectories that correspond to column family names?");
return Collections.emptyMap();
@@ -877,7 +877,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
for (LoadQueueItem q : queue) {
err.append(" ").append(q.getFilePath()).append('\n');
}
- LOG.error(err);
+ LOG.error(err.toString());
}
}
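Editorial note: for the same reason, arguments that are not Strings need an explicit conversion once the logger is SLF4J; the LoadIncrementalHFiles hunks above call toString() on a URI and on the StringBuilder that accumulates failed load items. Sketch only; StringArgDemo is hypothetical:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class StringArgDemo {
  private static final Logger LOG = LoggerFactory.getLogger(StringArgDemo.class);

  void dumpFailures(StringBuilder err) {
    // Before (commons-logging accepted any Object): LOG.error(err);
    LOG.error(err.toString());
  }
}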
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
index 60fd22d852..738ffc28c1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.tool;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -28,6 +26,8 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.OperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Optional;
@@ -61,7 +61,7 @@ import java.util.concurrent.atomic.AtomicLong;
*
*/
public class WriteSinkCoprocessor implements RegionCoprocessor, RegionObserver {
- private static final Log LOG = LogFactory.getLog(WriteSinkCoprocessor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WriteSinkCoprocessor.class);
private final AtomicLong ops = new AtomicLong();
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
index b4851bf105..89ff5b7222 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.util;
import java.io.DataInput;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
@@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.io.hfile.CompoundBloomFilterWriter;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Handles Bloom filter initialization based on configuration and serialized metadata in the reader
@@ -39,8 +39,8 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public final class BloomFilterFactory {
- private static final Log LOG =
- LogFactory.getLog(BloomFilterFactory.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(BloomFilterFactory.class.getName());
/** This class should not be instantiated. */
private BloomFilterFactory() {}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
index dbc7afa744..b6af8a5507 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
@@ -22,11 +22,11 @@ import java.io.IOException;
import java.util.Locale;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -52,7 +52,7 @@ import org.apache.hadoop.io.compress.Compressor;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
@InterfaceStability.Evolving
public class CompressionTest {
- private static final Log LOG = LogFactory.getLog(CompressionTest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CompressionTest.class);
public static boolean testCompression(String codec) {
codec = codec.toLowerCase(Locale.ROOT);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
index dadb615498..7b9f021313 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
@@ -23,13 +23,14 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -38,7 +39,6 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.commons.logging.LogFactory;
/**
* A utility to store user specific HConnections in memory.
@@ -48,7 +48,7 @@ import org.apache.commons.logging.LogFactory;
*/
@InterfaceAudience.Private
public class ConnectionCache {
- private static final Log LOG = LogFactory.getLog(ConnectionCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ConnectionCache.class);
private final Map<String, ConnectionInfo> connections = new ConcurrentHashMap<>();
private final KeyLocker<String> locker = new KeyLocker<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java
index 2cc4f44e2c..6c6a09d92f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java
@@ -32,11 +32,10 @@ import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/**
@@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class DirectMemoryUtils {
- private static final Log LOG = LogFactory.getLog(DirectMemoryUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DirectMemoryUtils.class);
private static final String MEMORY_USED = "MemoryUsed";
private static final MBeanServer BEAN_SERVER;
private static final ObjectName NIO_DIRECT_POOL;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java
index e6b8c0aa34..2687d3b033 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java
@@ -24,10 +24,10 @@ import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.security.EncryptionUtil;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class EncryptionTest {
- private static final Log LOG = LogFactory.getLog(EncryptionTest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EncryptionTest.class);
static final Map<String, Boolean> keyProviderResults = new ConcurrentHashMap<>();
static final Map<String, Boolean> cipherProviderResults = new ConcurrentHashMap<>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
index 8e13f4015d..cdc0aad3b5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
@@ -24,22 +24,22 @@ import java.io.InterruptedIOException;
import java.lang.reflect.Method;
import java.net.InetSocketAddress;
import java.net.URI;
+import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
-import java.util.Collection;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
/**
* Implementation for hdfs
@@ -47,7 +47,7 @@ import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class FSHDFSUtils extends FSUtils {
- private static final Log LOG = LogFactory.getLog(FSHDFSUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSHDFSUtils.class);
private static Class dfsUtilClazz;
private static Method getNNAddressesMethod;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
index bb7b1f3a29..4207f391db 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
@@ -21,19 +21,19 @@ package org.apache.hadoop.hbase.util;
import java.io.IOException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
/**
* MapR implementation.
*/
@InterfaceAudience.Private
public class FSMapRUtils extends FSUtils {
- private static final Log LOG = LogFactory.getLog(FSMapRUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSMapRUtils.class);
public void recoverFileLease(final FileSystem fs, final Path p,
Configuration conf, CancelableProgressable reporter) throws IOException {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
index 66ac3956d6..f258e6cd93 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
@@ -23,9 +23,10 @@ import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -39,7 +40,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
*/
@InterfaceAudience.Private
class FSRegionScanner implements Runnable {
- static private final Log LOG = LogFactory.getLog(FSRegionScanner.class);
+ static private final Logger LOG = LoggerFactory.getLogger(FSRegionScanner.class);
private Path regionPath;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index e1bc189e3f..5627e9a045 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -30,8 +30,6 @@ import java.util.regex.Pattern;
import edu.umd.cs.findbugs.annotations.Nullable;
import org.apache.commons.lang3.NotImplementedException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -40,6 +38,8 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
@@ -73,7 +73,7 @@ import org.apache.hadoop.hbase.TableName;
*/
@InterfaceAudience.Private
public class FSTableDescriptors implements TableDescriptors {
- private static final Log LOG = LogFactory.getLog(FSTableDescriptors.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSTableDescriptors.class);
private final FileSystem fs;
private final Path rootdir;
private final boolean fsreadonly;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 81fcaf201f..1620fd8bee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -55,8 +55,6 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -74,6 +72,8 @@ import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.HFileLink;
@@ -100,7 +100,7 @@ import org.apache.hadoop.util.StringUtils;
*/
@InterfaceAudience.Private
public abstract class FSUtils extends CommonFSUtils {
- private static final Log LOG = LogFactory.getLog(FSUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);
private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";
private static final int DEFAULT_THREAD_POOLSIZE = 2;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java
index 353f1c7a25..24cd223471 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.util;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -34,7 +34,7 @@ import org.apache.hadoop.fs.PathFilter;
*/
@InterfaceAudience.Private
public final class FSVisitor {
- private static final Log LOG = LogFactory.getLog(FSVisitor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSVisitor.class);
public interface StoreFileVisitor {
void storeFile(final String region, final String family, final String hfileName)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index aab3b36e67..5e7d7288a1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -38,6 +38,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.SortedMap;
@@ -61,8 +62,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -110,6 +109,7 @@ import org.apache.hadoop.hbase.io.FileLink;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.master.RegionState;
import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -141,7 +141,8 @@ import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
@@ -223,7 +224,7 @@ public class HBaseFsck extends Configured implements Closeable {
/**********************
* Internal resources
**********************/
- private static final Log LOG = LogFactory.getLog(HBaseFsck.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());
private ClusterStatus status;
private ClusterConnection connection;
private Admin admin;
@@ -805,7 +806,7 @@ public class HBaseFsck extends Configured implements Closeable {
cleanupHbckZnode();
unlockHbck();
} catch (Exception io) {
- LOG.warn(io);
+ LOG.warn(io.toString(), io);
} finally {
if (zkw != null) {
zkw.close();
@@ -907,11 +908,11 @@ public class HBaseFsck extends Configured implements Closeable {
errors.reportError(ERROR_CODE.BOUNDARIES_ERROR, "Found issues with regions boundaries",
tablesInfo.get(regionInfo.getTable()));
LOG.warn("Region's boundaries not aligned between stores and META for:");
- LOG.warn(currentRegionBoundariesInformation);
+ LOG.warn(Objects.toString(currentRegionBoundariesInformation));
}
}
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
@@ -1597,8 +1598,8 @@ public class HBaseFsck extends Configured implements Closeable {
// populate meta
List<Put> puts = generatePuts(tablesInfo);
if (puts == null) {
- LOG.fatal("Problem encountered when creating new hbase:meta entries. " +
- "You may need to restore the previously sidelined hbase:meta");
+ LOG.error(HBaseMarkers.FATAL, "Problem encountered when creating new hbase:meta "
+ + "entries. You may need to restore the previously sidelined hbase:meta");
return false;
}
meta.batchMutate(puts.toArray(new Put[puts.size()]), HConstants.NO_NONCE, HConstants.NO_NONCE);
@@ -1791,9 +1792,9 @@ public class HBaseFsck extends Configured implements Closeable {
try {
sidelineTable(fs, TableName.META_TABLE_NAME, hbaseDir, backupDir);
} catch (IOException e) {
- LOG.fatal("... failed to sideline meta. Currently in inconsistent state. To restore "
- + "try to rename hbase:meta in " + backupDir.getName() + " to "
- + hbaseDir.getName() + ".", e);
+ LOG.error(HBaseMarkers.FATAL, "... failed to sideline meta. Currently in "
+ + "inconsistent state. To restore try to rename hbase:meta in " +
+ backupDir.getName() + " to " + hbaseDir.getName() + ".", e);
throw e; // throw original exception
}
return backupDir;
@@ -1882,7 +1883,7 @@ public class HBaseFsck extends Configured implements Closeable {
* Record the location of the hbase:meta region as found in ZooKeeper.
*/
private boolean recordMetaRegion() throws IOException {
- RegionLocations rl = ((ClusterConnection)connection).locateRegion(TableName.META_TABLE_NAME,
+ RegionLocations rl = connection.locateRegion(TableName.META_TABLE_NAME,
HConstants.EMPTY_START_ROW, false, false);
if (rl == null) {
errors.reportError(ERROR_CODE.NULL_META_REGION,
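A minimal illustrative sketch (not part of the patch) of the FATAL handling used throughout this change: slf4j defines no fatal() level, so former Log.fatal(...) calls are rewritten as error() with a Marker (HBase supplies one in org.apache.hadoop.hbase.log.HBaseMarkers; the marker below is created locally for illustration). Objects.toString(...) is likewise used above where a possibly-null Object was previously logged directly.

import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class FatalMarkerSketch {
  // Illustrative stand-in for HBaseMarkers.FATAL
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");
  private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerSketch.class);

  void failHard(Object details, Exception e) {
    // was LOG.fatal(msg, e) under commons-logging
    LOG.error(FATAL, "Problem encountered; cannot continue", e);
    // was LOG.warn(details), where details may be null
    LOG.warn(Objects.toString(details));
  }
}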
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java
index afb6c5b0e1..b8811c7ce9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java
@@ -24,8 +24,6 @@ import java.util.EnumSet;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ClusterStatus.Option;
@@ -46,6 +44,8 @@ import org.apache.hadoop.hbase.master.ServerManager;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class contains helper methods that repair parts of hbase's filesystem
@@ -53,7 +53,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public class HBaseFsckRepair {
- private static final Log LOG = LogFactory.getLog(HBaseFsckRepair.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseFsckRepair.class);
/**
* Fix multiple assignment by doing silent closes on each RS hosting the region
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
index b88c0e63f6..00410af2fa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
@@ -24,9 +24,9 @@ import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoordinatedStateManager;
import org.apache.hadoop.hbase.master.HMaster;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer;
*/
@InterfaceAudience.Private
public class JVMClusterUtil {
- private static final Log LOG = LogFactory.getLog(JVMClusterUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JVMClusterUtil.class);
/**
* Datastructure to hold RegionServer Thread and RegionServer instance
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java
index 765edf93c4..202b9fb2d0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java
@@ -24,9 +24,9 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.metrics.JvmPauseMonitorSource;
import org.apache.hadoop.conf.Configuration;
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
*/
@InterfaceAudience.Private
public class JvmPauseMonitor {
- private static final Log LOG = LogFactory.getLog(JvmPauseMonitor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JvmPauseMonitor.class);
/** The target sleep time */
private static final long SLEEP_INTERVAL_MS = 500;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java
index fe33c24d99..1c860b42e9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java
@@ -32,8 +32,6 @@ import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
@@ -42,13 +40,15 @@ import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utility methods for interacting with the regions.
*/
@InterfaceAudience.Private
public abstract class ModifyRegionUtils {
- private static final Log LOG = LogFactory.getLog(ModifyRegionUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ModifyRegionUtils.class);
private ModifyRegionUtils() {
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java
index 1f19848317..58057932bd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java
@@ -27,12 +27,12 @@ import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
*/
@InterfaceAudience.Private
public class MultiHConnection {
- private static final Log LOG = LogFactory.getLog(MultiHConnection.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiHConnection.class);
private Connection[] connections;
private final Object connectionsLock = new Object();
private final int noOfConnections;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java
index 2dc1fe9c93..711507b0f6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java
@@ -43,8 +43,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus.Option;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -64,6 +62,8 @@ import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tool for loading/unloading regions to/from given regionserver This tool can be run from Command
@@ -82,7 +82,7 @@ public class RegionMover extends AbstractHBaseTool {
public static final int DEFAULT_MOVE_RETRIES_MAX = 5;
public static final int DEFAULT_MOVE_WAIT_MAX = 60;
public static final int DEFAULT_SERVERSTART_WAIT_MAX = 180;
- static final Log LOG = LogFactory.getLog(RegionMover.class);
+ static final Logger LOG = LoggerFactory.getLogger(RegionMover.class);
private RegionMoverBuilder rmbuilder;
private boolean ack = true;
private int maxthreads = 1;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java
index 91be6e8498..e41882fb37 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java
@@ -26,9 +26,9 @@ import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
@@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.TreeMultimap;
*/
@InterfaceAudience.Private
public class RegionSplitCalculator<R extends KeyRange> {
- private static final Log LOG = LogFactory.getLog(RegionSplitCalculator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionSplitCalculator.class);
private final Comparator<R> rangeCmp;
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
index 06bccd13ab..5f480a5eee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
@@ -37,8 +37,6 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -56,6 +54,8 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
@@ -145,7 +145,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
*/
@InterfaceAudience.Private
public class RegionSplitter {
- private static final Log LOG = LogFactory.getLog(RegionSplitter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionSplitter.class);
/**
* A generic interface for the RegionSplitter code to use for all it's
@@ -434,7 +434,7 @@ public class RegionSplitter {
* Alternative getCurrentNrHRS which is no longer available.
* @param connection
* @return Rough count of regionservers out on cluster.
- * @throws IOException
+ * @throws IOException
*/
private static int getRegionServerCount(final Connection connection) throws IOException {
try (Admin admin = connection.getAdmin()) {
@@ -729,7 +729,7 @@ public class RegionSplitter {
}
} catch (NoServerForRegionException nsfre) {
// NSFRE will occur if the old hbase:meta entry has no server assigned
- LOG.info(nsfre);
+ LOG.info(nsfre.toString(), nsfre);
logicalSplitting.add(region);
continue;
}
@@ -785,7 +785,7 @@ public class RegionSplitter {
* @param conf
* @param tableName
* @return A Pair where first item is table dir and second is the split file.
- * @throws IOException
+ * @throws IOException
*/
private static Pair<Path, Path> getTableDirAndSplitFile(final Configuration conf,
final TableName tableName)
@@ -803,7 +803,7 @@ public class RegionSplitter {
getTableDirAndSplitFile(connection.getConfiguration(), tableName);
Path tableDir = tableDirAndSplitFile.getFirst();
Path splitFile = tableDirAndSplitFile.getSecond();
-
+
FileSystem fs = tableDir.getFileSystem(connection.getConfiguration());
// Using strings because (new byte[]{0}).equals(new byte[]{0}) == false
@@ -949,6 +949,7 @@ public class RegionSplitter {
this.rowComparisonLength = lastRow.length();
}
+ @Override
public byte[] split(byte[] start, byte[] end) {
BigInteger s = convertToBigInteger(start);
BigInteger e = convertToBigInteger(end);
@@ -956,6 +957,7 @@ public class RegionSplitter {
return convertToByte(split2(s, e));
}
+ @Override
public byte[][] split(int n) {
Preconditions.checkArgument(lastRowInt.compareTo(firstRowInt) > 0,
"last row (%s) is configured less than first row (%s)", lastRow,
@@ -1009,19 +1011,23 @@ public class RegionSplitter {
}
}
+ @Override
public byte[] firstRow() {
return convertToByte(firstRowInt);
}
+ @Override
public byte[] lastRow() {
return convertToByte(lastRowInt);
}
+ @Override
public void setFirstRow(String userInput) {
firstRow = userInput;
firstRowInt = new BigInteger(firstRow, radix);
}
+ @Override
public void setLastRow(String userInput) {
lastRow = userInput;
lastRowInt = new BigInteger(lastRow, radix);
@@ -1029,14 +1035,17 @@ public class RegionSplitter {
rowComparisonLength = lastRow.length();
}
+ @Override
public byte[] strToRow(String in) {
return convertToByte(new BigInteger(in, radix));
}
+ @Override
public String rowToStr(byte[] row) {
return Bytes.toStringBinary(row);
}
+ @Override
public String separator() {
return " ";
}
@@ -1130,6 +1139,7 @@ public class RegionSplitter {
byte[] firstRowBytes = ArrayUtils.EMPTY_BYTE_ARRAY;
byte[] lastRowBytes =
new byte[] {xFF, xFF, xFF, xFF, xFF, xFF, xFF, xFF};
+ @Override
public byte[] split(byte[] start, byte[] end) {
return Bytes.split(start, end, 1)[1];
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
index 4175526ec6..83ec5ffc63 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
@@ -26,9 +26,9 @@ import java.util.Locale;
import java.util.Map.Entry;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -40,7 +40,7 @@ import org.apache.hadoop.util.ToolRunner;
*/
@InterfaceAudience.Private
public abstract class ServerCommandLine extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(ServerCommandLine.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ServerCommandLine.class);
@SuppressWarnings("serial")
private static final Set<String> DEFAULT_SKIP_WORDS = new HashSet<String>() {
{
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
index 9b61b8b410..fe514d8c1c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.util;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -37,13 +35,15 @@ import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.regionserver.RegionReplicaReplicationEndpoint;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Similar to {@link RegionReplicaUtil} but for the server side
*/
public class ServerRegionReplicaUtil extends RegionReplicaUtil {
- private static final Log LOG = LogFactory.getLog(ServerRegionReplicaUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ServerRegionReplicaUtil.class);
/**
* Whether asynchronous WAL replication to the secondary region replicas is enabled or not.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java
index 11327e8baa..b22b4ff40b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java
@@ -22,8 +22,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* utlity method to migrate zookeeper data across HBase versions.
@@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Private
public class ZKDataMigrator {
- private static final Log LOG = LogFactory.getLog(ZKDataMigrator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKDataMigrator.class);
/**
* Method for table states migration.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
index 44bbb38ca5..e937fa529a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
@@ -31,9 +31,9 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -60,7 +60,7 @@ import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;
*/
@InterfaceAudience.Private
public class HFileCorruptionChecker {
- private static final Log LOG = LogFactory.getLog(HFileCorruptionChecker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileCorruptionChecker.class);
final Configuration conf;
final FileSystem fs;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
index c208d8aa3a..534b948bfe 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.util.hbck;
import java.io.IOException;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -46,7 +46,7 @@ import org.apache.hadoop.io.MultipleIOException;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
@InterfaceStability.Evolving
public class OfflineMetaRepair {
- private static final Log LOG = LogFactory.getLog(OfflineMetaRepair.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(OfflineMetaRepair.class.getName());
protected static void printUsageAndExit() {
StringBuilder sb = new StringBuilder();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
index aba13c658f..8bd9a3086a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
@@ -25,8 +25,6 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.util.CancelableProgressable;
@@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceStability.Evolving
public abstract class AbstractFSWALProvider<T extends AbstractFSWAL<?>> implements WALProvider {
- private static final Log LOG = LogFactory.getLog(AbstractFSWALProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractFSWALProvider.class);
/** Separate old log into different dir by regionserver name **/
public static final String SEPARATE_OLDLOGDIR = "hbase.separate.oldlogdir.by.regionserver";
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java
index 5cb01899db..8bb1802e6b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.wal;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -31,7 +29,8 @@ import org.apache.hadoop.hbase.util.CommonFSUtils.StreamLacksCapabilityException
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
@@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.DefaultThreadFact
@InterfaceStability.Evolving
public class AsyncFSWALProvider extends AbstractFSWALProvider<AsyncFSWAL> {
- private static final Log LOG = LogFactory.getLog(AsyncFSWALProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncFSWALProvider.class);
// Only public so classes back in regionserver.wal can access
public interface AsyncWriter extends WALProvider.AsyncWriter {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
index cedf3509f5..280d95fec3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
@@ -26,8 +26,6 @@ import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// imports for things that haven't moved from regionserver.wal yet.
@@ -50,7 +50,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
class DisabledWALProvider implements WALProvider {
- private static final Log LOG = LogFactory.getLog(DisabledWALProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DisabledWALProvider.class);
WAL disabled;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java
index b72e66841e..14505a8a9c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java
@@ -20,14 +20,14 @@ package org.apache.hadoop.hbase.wal;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// imports for things that haven't moved from regionserver.wal yet.
import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.CommonFSUtils;
@InterfaceStability.Evolving
public class FSHLogProvider extends AbstractFSWALProvider<FSHLog> {
- private static final Log LOG = LogFactory.getLog(FSHLogProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FSHLogProvider.class);
// Only public so classes back in regionserver.wal can access
public interface Writer extends WALProvider.Writer {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
index ab3a7d9419..b8c9484ab3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
@@ -28,10 +28,10 @@ import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// imports for classes still in regionserver.wal
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.util.Bytes;
@@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.util.IdLock;
*/
@InterfaceAudience.Private
public class RegionGroupingProvider implements WALProvider {
- private static final Log LOG = LogFactory.getLog(RegionGroupingProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionGroupingProvider.class);
/**
* Map identifiers to a group number.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java
index f5b611bae8..c909e905da 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.wal;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -35,7 +33,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -55,7 +54,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe
@InterfaceAudience.LimitedPrivate({ HBaseInterfaceAudience.REPLICATION,
HBaseInterfaceAudience.COPROC })
public class WALEdit implements HeapSize {
- private static final Log LOG = LogFactory.getLog(WALEdit.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALEdit.class);
// TODO: Get rid of this; see HBASE-8457
public static final byte [] METAFAMILY = Bytes.toBytes("METAFAMILY");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
index 5855419683..0628f8652f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
@@ -29,12 +29,12 @@ import java.util.List;
import java.util.OptionalLong;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// imports for things that haven't moved from regionserver.wal yet.
import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
@@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.wal.WALProvider.Writer;
@InterfaceAudience.Private
public class WALFactory implements WALFileLengthProvider {
- private static final Log LOG = LogFactory.getLog(WALFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALFactory.class);
/**
* Maps between configuration names for providers and implementation classes.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index ce1713a127..18ea7d7251 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -52,8 +52,6 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileStatus;
@@ -71,6 +69,7 @@ import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.HeapSize;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.master.SplitLogManager;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
@@ -97,6 +96,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class is responsible for splitting up a bunch of regionserver commit log
@@ -105,7 +106,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class WALSplitter {
- private static final Log LOG = LogFactory.getLog(WALSplitter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(WALSplitter.class);
/** By default we retry errors in splitting, rather than skipping. */
public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false;
@@ -1538,7 +1539,7 @@ public class WALSplitter {
} catch (IOException e) {
e = e instanceof RemoteException ?
((RemoteException)e).unwrapRemoteException() : e;
- LOG.fatal(" Got while writing log entry to log", e);
+ LOG.error(HBaseMarkers.FATAL, " Got while writing log entry to log", e);
throw e;
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java
index 6a214385e6..37f3279dac 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java
@@ -28,8 +28,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
@@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@InterfaceAudience.Private
public class AcidGuaranteesTestTool extends AbstractHBaseTool {
- private static final Log LOG = LogFactory.getLog(AcidGuaranteesTestTool.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AcidGuaranteesTestTool.class);
public static final TableName TABLE_NAME = TableName.valueOf("TestAcidGuarantees");
public static final byte[] FAMILY_A = Bytes.toBytes("A");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java
index 08565e07af..8ee8be697e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase;
import java.io.File;
import java.io.IOException;
-import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
@@ -32,13 +31,9 @@ import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.util.Time;
-import org.apache.log4j.Layout;
import org.apache.log4j.Logger;
-import org.apache.log4j.WriterAppender;
import org.junit.Assert;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
@@ -117,42 +112,12 @@ public abstract class GenericTestUtils {
TimedOutTestsListener.buildThreadDiagnosticString());
}
- public static class LogCapturer {
- private StringWriter sw = new StringWriter();
- private WriterAppender appender;
- private Logger logger;
-
- public static LogCapturer captureLogs(Log l) {
- Logger logger = ((Log4JLogger)l).getLogger();
- LogCapturer c = new LogCapturer(logger);
- return c;
- }
-
-
- private LogCapturer(Logger logger) {
- this.logger = logger;
- Layout layout = Logger.getRootLogger().getAppender("stdout").getLayout();
- WriterAppender wa = new WriterAppender(layout, sw);
- logger.addAppender(wa);
- }
-
- public String getOutput() {
- return sw.toString();
- }
-
- public void stopCapturing() {
- logger.removeAppender(appender);
-
- }
- }
-
-
/**
* Mockito answer helper that triggers one latch as soon as the
* method is called, then waits on another before continuing.
*/
public static class DelayAnswer implements Answer {
- private final Log LOG;
+ private final Logger LOG;
private final CountDownLatch fireLatch = new CountDownLatch(1);
private final CountDownLatch waitLatch = new CountDownLatch(1);
@@ -165,7 +130,7 @@ public abstract class GenericTestUtils {
private volatile Throwable thrown;
private volatile Object returnValue;
- public DelayAnswer(Log log) {
+ public DelayAnswer(Logger log) {
this.LOG = log;
}
@@ -262,13 +227,13 @@ public abstract class GenericTestUtils {
*/
public static class DelegateAnswer implements Answer {
private final Object delegate;
- private final Log log;
+ private final Logger log;
public DelegateAnswer(Object delegate) {
this(null, delegate);
}
- public DelegateAnswer(Log log, Object delegate) {
+ public DelegateAnswer(Logger log, Object delegate) {
this.log = log;
this.delegate = delegate;
}
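
With commons-logging gone, the Log4JLogger-based LogCapturer helper is removed outright, and the DelayAnswer/DelegateAnswer Mockito helpers switch their constructor parameter to an SLF4J Logger. A small usage sketch under that assumption (the wrapper class and field names are illustrative):

    import org.apache.hadoop.hbase.GenericTestUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class DelayAnswerUsageSketch {
      // The helpers now accept an org.slf4j.Logger rather than a commons-logging Log.
      private static final Logger LOG = LoggerFactory.getLogger(DelayAnswerUsageSketch.class);
      GenericTestUtils.DelayAnswer delayAnswer = new GenericTestUtils.DelayAnswer(LOG);
      GenericTestUtils.DelegateAnswer delegateAnswer =
          new GenericTestUtils.DelegateAnswer(LOG, new Object());
    }
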
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java
index cac957bc88..58ae059091 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase;
import java.io.Closeable;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
@@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.util.Threads;
@InterfaceAudience.Private
public abstract class HBaseCluster implements Closeable, Configurable {
// Log is being used in DistributedHBaseCluster class, hence keeping it as package scope
- static final Log LOG = LogFactory.getLog(HBaseCluster.class.getName());
+ static final Logger LOG = LoggerFactory.getLogger(HBaseCluster.class.getName());
protected Configuration conf;
/** the status of the cluster before we begin */
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
index 32cffc0887..92581b8fb6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import java.util.NavigableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -31,6 +29,7 @@ import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionAsTable;
@@ -38,6 +37,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
@@ -49,7 +50,7 @@ import junit.framework.TestCase;
*/
@Deprecated
public abstract class HBaseTestCase extends TestCase {
- private static final Log LOG = LogFactory.getLog(HBaseTestCase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseTestCase.class);
protected final static byte [] fam1 = Bytes.toBytes("colfamily11");
protected final static byte [] fam2 = Bytes.toBytes("colfamily21");
@@ -115,7 +116,7 @@ public abstract class HBaseTestCase extends TestCase {
testDir = FSUtils.getRootDir(conf);
}
} catch (Exception e) {
- LOG.fatal("error during setup", e);
+ LOG.error(HBaseMarkers.FATAL, "error during setup", e);
throw e;
}
}
@@ -129,7 +130,7 @@ public abstract class HBaseTestCase extends TestCase {
}
}
} catch (Exception e) {
- LOG.fatal("error during tear down", e);
+ LOG.error(HBaseMarkers.FATAL, "error during tear down", e);
}
super.tearDown();
}
@@ -284,7 +285,7 @@ public abstract class HBaseTestCase extends TestCase {
* @throws IOException
*/
public static long addContent(final Table updater,
- final String columnFamily,
+ final String columnFamily,
final String column,
final byte [] startKeyBytes, final byte [] endKey, final long ts)
throws IOException {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index e03298afe7..566c3b05c6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -55,8 +55,6 @@ import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Jdk14Logger;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
@@ -145,11 +143,14 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.TaskLog;
import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.log4j.LogManager;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.ZooKeeper.States;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.impl.Log4jLoggerAdapter;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
/**
@@ -2600,9 +2601,11 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
* @param clazz The class for which to switch to debug logging.
*/
public void enableDebug(Class<?> clazz) {
- Log l = LogFactory.getLog(clazz);
+ Logger l = LoggerFactory.getLogger(clazz);
if (l instanceof Log4JLogger) {
((Log4JLogger) l).getLogger().setLevel(org.apache.log4j.Level.DEBUG);
+ } else if (l instanceof Log4jLoggerAdapter) {
+ LogManager.getLogger(clazz).setLevel(org.apache.log4j.Level.DEBUG);
} else if (l instanceof Jdk14Logger) {
((Jdk14Logger) l).getLogger().setLevel(java.util.logging.Level.ALL);
}
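
When the log4j binding is on the classpath, LoggerFactory.getLogger(...) hands back an org.slf4j.impl.Log4jLoggerAdapter, which exposes no setLevel method; that is why the new branch bypasses SLF4J and adjusts the level through log4j's own LogManager. A standalone sketch of the same idea (the helper class is illustrative):

    import org.apache.log4j.Level;
    import org.apache.log4j.LogManager;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.impl.Log4jLoggerAdapter;

    class DebugLevelSketch {
      static void enableDebug(Class<?> clazz) {
        Logger logger = LoggerFactory.getLogger(clazz);
        if (logger instanceof Log4jLoggerAdapter) {
          // SLF4J itself has no level-setting API, so drop down to log4j directly.
          LogManager.getLogger(clazz).setLevel(Level.DEBUG);
        }
      }
    }
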
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
index b8a86c6d00..3ee6f7d6d4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
@@ -22,14 +22,14 @@ import java.io.IOException;
import java.security.SecureRandom;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.commons.math3.random.RandomData;
import org.apache.commons.math3.random.RandomDataImpl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.io.crypto.CryptoCipherProvider;
import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider;
@@ -62,8 +62,8 @@ public class HFilePerformanceEvaluation {
"WARN");
}
- private static final Log LOG =
- LogFactory.getLog(HFilePerformanceEvaluation.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(HFilePerformanceEvaluation.class.getName());
static byte [] format(final int i) {
String v = Integer.toString(i);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
index 3f85181308..ad58124f5a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
@@ -25,9 +25,10 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.master.HMaster;
@@ -53,7 +54,7 @@ import org.apache.hadoop.hbase.util.Threads;
*/
@InterfaceAudience.Public
public class MiniHBaseCluster extends HBaseCluster {
- private static final Log LOG = LogFactory.getLog(MiniHBaseCluster.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(MiniHBaseCluster.class.getName());
public LocalHBaseCluster hbaseCluster;
private static int index;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java
index c05830f6b4..f1e020f581 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java
@@ -26,8 +26,6 @@ import java.util.Map;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.client.ClusterConnection;
@@ -56,7 +54,8 @@ import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import com.google.protobuf.Service;
@@ -65,7 +64,7 @@ import com.google.protobuf.Service;
* Basic mock region server services. Should only be instantiated by HBaseTestingUtility.b
*/
public class MockRegionServerServices implements RegionServerServices {
- protected static final Log LOG = LogFactory.getLog(MockRegionServerServices.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(MockRegionServerServices.class);
private final Map<String, Region> regions = new HashMap<>();
private final ConcurrentSkipListMap<byte[], Boolean> rit =
new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java
index cf07b42a12..1d8de45e6d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java
@@ -25,14 +25,14 @@ import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public abstract class MultithreadedTestUtil {
- private static final Log LOG =
- LogFactory.getLog(MultithreadedTestUtil.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(MultithreadedTestUtil.class);
public static class TestContext {
private final Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
index e2350e8118..f919db787d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
@@ -22,16 +22,16 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Code shared by PE tests.
*/
public class PerformanceEvaluationCommons {
- private static final Log LOG =
- LogFactory.getLog(PerformanceEvaluationCommons.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(PerformanceEvaluationCommons.class.getName());
public static void assertValueSize(final int expectedSize, final int got) {
if (got != expectedSize) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
index 34e8c3c00d..a690987c4f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
@@ -25,8 +25,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionLocator;
@@ -42,7 +40,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
/**
@@ -51,7 +50,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
*/
@Category({MiscTests.class, MediumTests.class})
public class TestGlobalMemStoreSize {
- private static final Log LOG = LogFactory.getLog(TestGlobalMemStoreSize.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestGlobalMemStoreSize.class);
private static int regionServerNum = 4;
private static int regionNum = 16;
// total region num = region num + root and meta regions
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
index 1d752d2af5..cf54b8f875 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
@@ -35,8 +35,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -58,13 +56,15 @@ import org.junit.rules.TestName;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test our testing utility class
*/
@Category({MiscTests.class, LargeTests.class})
public class TestHBaseTestingUtility {
- private static final Log LOG = LogFactory.getLog(TestHBaseTestingUtility.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHBaseTestingUtility.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
index 643940cdd1..6968794735 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
@@ -25,8 +25,6 @@ import java.util.Collection;
import java.util.List;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -55,7 +53,8 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.apache.hadoop.hbase.wal.WAL;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -81,7 +80,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDes
*/
@Category({MiscTests.class, LargeTests.class})
public class TestIOFencing {
- private static final Log LOG = LogFactory.getLog(TestIOFencing.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestIOFencing.class);
static {
// Uncomment the following lines if more verbosity is needed for
// debugging (see HBASE-12285 for details).
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
index e63eaf24e3..ceb9f56257 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
@@ -26,8 +26,6 @@ import java.net.ServerSocket;
import java.nio.channels.ServerSocketChannel;
import java.util.Locale;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert;
@@ -35,6 +33,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This tests whether ServerSocketChannel works over ipv6, which ZooKeeper
@@ -49,7 +49,7 @@ import org.junit.rules.TestRule;
*/
@Category({MiscTests.class, SmallTests.class})
public class TestIPv6NIOServerSocketChannel {
- private static final Log LOG = LogFactory.getLog(TestIPv6NIOServerSocketChannel.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestIPv6NIOServerSocketChannel.class);
@Rule
public final TestRule timeout = CategoryBasedTimeout.builder().
@@ -103,7 +103,7 @@ public class TestIPv6NIOServerSocketChannel {
if (channel != null) {
channel.close();
}
- }
+ }
}
}
@@ -126,8 +126,7 @@ public class TestIPv6NIOServerSocketChannel {
//or java.net.SocketException: Protocol family not supported
Assert.assertFalse(ex instanceof BindException);
Assert.assertTrue(ex.getMessage().toLowerCase(Locale.ROOT).contains("protocol family"));
- LOG.info("Received expected exception:");
- LOG.info(ex);
+ LOG.info("Received expected exception:", ex);
//if this is the case, ensure that we are running on preferIPv4=true
ensurePreferIPv4();
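
commons-logging accepted a bare Object, so calls like LOG.info(ex) and LOG.warn(exp) compiled; the SLF4J methods expect a String message first, so these call sites are rewritten to pass a message (or the exception's toString()) followed by the Throwable, which also keeps the stack trace in the output. A short sketch of the pattern (class and method names are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ExceptionLoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ExceptionLoggingSketch.class);

      void handle(Exception ex) {
        // Old commons-logging style: LOG.info(ex); / LOG.warn(ex);
        // SLF4J style used by the patch: message first, Throwable second.
        LOG.info("Received expected exception:", ex);
        LOG.warn(ex.toString(), ex);
      }
    }
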
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java
index c5a817ba73..f3d3115496 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java
@@ -27,8 +27,6 @@ import java.io.InputStream;
import java.net.URL;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -39,6 +37,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Testing, info servers are disabled. This test enables then and checks that
@@ -46,7 +46,7 @@ import org.junit.rules.TestName;
*/
@Category({MiscTests.class, MediumTests.class})
public class TestInfoServers {
- private static final Log LOG = LogFactory.getLog(TestInfoServers.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestInfoServers.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
@Rule
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java
index a5cf1a8423..444db644e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java
@@ -24,8 +24,6 @@ import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.naming.ServiceUnavailableException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -41,13 +39,15 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test case for JMX Connector Server.
*/
@Category({ MiscTests.class, MediumTests.class })
public class TestJMXConnectorServer {
- private static final Log LOG = LogFactory.getLog(TestJMXConnectorServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestJMXConnectorServer.class);
private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static Configuration conf = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java
index 6c8f27ad1b..520294a8c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java
@@ -24,8 +24,6 @@ import javax.management.MBeanServerConnection;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -39,12 +37,14 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, MediumTests.class})
public class TestJMXListener {
- private static final Log LOG = LogFactory.getLog(TestJMXListener.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestJMXListener.class);
private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static int connectorPort = 61120;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java
index 15c0b0ca29..14057de069 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java
@@ -34,8 +34,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
@@ -66,7 +64,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -75,7 +74,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category({MiscTests.class, MediumTests.class})
@SuppressWarnings("deprecation")
public class TestMetaTableAccessor {
- private static final Log LOG = LogFactory.getLog(TestMetaTableAccessor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMetaTableAccessor.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static Connection connection;
private Random random = new Random();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
index 961677b10a..7049a74581 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
@@ -27,8 +27,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.NavigableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.HConnectionTestingUtility;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -49,7 +47,8 @@ import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
@@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
*/
@Category({MiscTests.class, MediumTests.class})
public class TestMetaTableAccessorNoCluster {
- private static final Log LOG = LogFactory.getLog(TestMetaTableAccessorNoCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMetaTableAccessorNoCluster.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final Abortable ABORTABLE = new Abortable() {
boolean aborted = false;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
index c2a49451d9..33cac66423 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
@@ -26,8 +26,6 @@ import static org.junit.Assert.assertFalse;
import java.io.IOException;
import java.net.ConnectException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.HConnectionTestingUtility;
@@ -53,7 +51,8 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
@@ -62,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
*/
@Category({MiscTests.class, MediumTests.class})
public class TestMetaTableLocator {
- private static final Log LOG = LogFactory.getLog(TestMetaTableLocator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMetaTableLocator.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final ServerName SN =
ServerName.valueOf("example.org", 1234, System.currentTimeMillis());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java
index 5fb16c79b0..bdb74a4b19 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -28,6 +26,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
@@ -37,7 +37,7 @@ import java.io.IOException;
*/
@Category({ MiscTests.class, MediumTests.class }) public class TestMovedRegionsCleaner {
- private static final Log LOG = LogFactory.getLog(TestRegionRebalancing.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionRebalancing.class);
private final HBaseTestingUtility UTIL = new HBaseTestingUtility();
public static int numCalls = 0;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
index 12605519f5..1a0215e5a9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
@@ -28,8 +28,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.NavigableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TimestampTestBase.FlushCache;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Get;
@@ -49,6 +47,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Port of old TestScanMultipleVersions, TestTimestamp and TestGetRowVersions
@@ -56,7 +56,7 @@ import org.junit.rules.TestName;
*/
@Category({MiscTests.class, MediumTests.class})
public class TestMultiVersions {
- private static final Log LOG = LogFactory.getLog(TestMultiVersions.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMultiVersions.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private Admin admin;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
index 78b9585b79..7b4c930af4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
@@ -28,8 +28,6 @@ import java.io.IOException;
import java.util.Set;
import java.util.concurrent.Callable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Admin;
@@ -50,10 +48,12 @@ import org.junit.experimental.categories.Category;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, MediumTests.class})
public class TestNamespace {
- private static final Log LOG = LogFactory.getLog(TestNamespace.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestNamespace.class);
private static HMaster master;
protected final static int NUM_SLAVES_BASE = 4;
private static HBaseTestingUtility TEST_UTIL;
@@ -130,7 +130,7 @@ public class TestNamespace {
try {
admin.createNamespace(NamespaceDescriptor.DEFAULT_NAMESPACE);
} catch (IOException exp) {
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
exceptionCaught = true;
} finally {
assertTrue(exceptionCaught);
@@ -140,7 +140,7 @@ public class TestNamespace {
try {
admin.createNamespace(NamespaceDescriptor.SYSTEM_NAMESPACE);
} catch (IOException exp) {
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
exceptionCaught = true;
} finally {
assertTrue(exceptionCaught);
@@ -153,7 +153,7 @@ public class TestNamespace {
try {
admin.deleteNamespace(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME_STR);
} catch (IOException exp) {
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
exceptionCaught = true;
} finally {
assertTrue(exceptionCaught);
@@ -162,7 +162,7 @@ public class TestNamespace {
try {
admin.deleteNamespace(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR);
} catch (IOException exp) {
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
exceptionCaught = true;
} finally {
assertTrue(exceptionCaught);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java
index 9360b1f34e..22daf49093 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java
@@ -27,8 +27,6 @@ import java.io.IOException;
import java.io.PrintWriter;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -39,11 +37,13 @@ import org.apache.hadoop.util.Shell;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestNodeHealthCheckChore {
- private static final Log LOG = LogFactory.getLog(TestNodeHealthCheckChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestNodeHealthCheckChore.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final int SCRIPT_TIMEOUT = 5000;
private File healthScriptFile;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
index 3b2b6fed90..12f43d4867 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
@@ -30,8 +30,6 @@ import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.ClientScanner;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
@@ -56,6 +54,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* These tests are focused on testing how partial results appear to a client. Partial results are
@@ -69,7 +69,7 @@ import org.junit.rules.TestName;
*/
@Category(MediumTests.class)
public class TestPartialResultsFromClientSide {
- private static final Log LOG = LogFactory.getLog(TestPartialResultsFromClientSide.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestPartialResultsFromClientSide.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static int MINICLUSTER_SIZE = 5;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java
index 467aadacbd..5f2898fadd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java
@@ -27,8 +27,6 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -47,7 +45,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
/**
@@ -68,7 +67,7 @@ public class TestRegionRebalancing {
}
private static final byte[] FAMILY_NAME = Bytes.toBytes("col");
- private static final Log LOG = LogFactory.getLog(TestRegionRebalancing.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionRebalancing.class);
private final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private RegionLocator regionLocator;
private HTableDescriptor desc;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
index f987ea7ca1..22ab8b9fe6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
@@ -36,8 +36,6 @@ import java.util.Map;
import java.util.Random;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -54,12 +52,14 @@ import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runners.MethodSorters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ MiscTests.class, MediumTests.class })
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@Ignore
public class TestStochasticBalancerJmxMetrics extends BalancerTestBase {
- private static final Log LOG = LogFactory.getLog(TestStochasticBalancerJmxMetrics.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestStochasticBalancerJmxMetrics.class);
private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static int connectorPort = 61120;
private static StochasticLoadBalancer loadBalancer;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
index ed93b1750a..b41e39907e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
@@ -29,8 +29,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Put;
@@ -66,11 +64,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, LargeTests.class})
public class TestZooKeeper {
- private static final Log LOG = LogFactory.getLog(TestZooKeeper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestZooKeeper.class);
private final static HBaseTestingUtility
TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
index c95f7b33e6..95de9bcc67 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
@@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -56,6 +54,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test that the {@link HFileArchiver} correctly removes all the parts of a region when cleaning up
@@ -64,7 +64,7 @@ import org.junit.rules.TestName;
@Category({MediumTests.class, MiscTests.class})
public class TestHFileArchiving {
- private static final Log LOG = LogFactory.getLog(TestHFileArchiving.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHFileArchiving.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final byte[] TEST_FAM = Bytes.toBytes("fam");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
index 1c2279cc3c..4eb1ae4973 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
@@ -27,8 +27,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -65,6 +63,8 @@ import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Spin up a small cluster and check that the hfiles of region are properly long-term archived as
@@ -73,7 +73,7 @@ import org.mockito.stubbing.Answer;
@Category({MiscTests.class, MediumTests.class})
public class TestZooKeeperTableArchiveClient {
- private static final Log LOG = LogFactory.getLog(TestZooKeeperTableArchiveClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestZooKeeperTableArchiveClient.class);
private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU();
private static final String STRING_TABLE_NAME = "test";
private static final byte[] TEST_FAM = Bytes.toBytes("fam");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
index 0b7da280c3..7e5ccf3de4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
@@ -33,8 +33,6 @@ import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -65,7 +63,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest;
@@ -76,7 +75,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTable
*/
@Category({LargeTests.class, ClientTests.class})
public class TestAdmin1 {
- private static final Log LOG = LogFactory.getLog(TestAdmin1.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestAdmin1.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private Admin admin;
@@ -1253,7 +1252,7 @@ public class TestAdmin1 {
try {
this.admin.deleteColumnFamily(tableName, Bytes.toBytes("col2"));
} catch (TableNotDisabledException e) {
- LOG.info(e);
+ LOG.info(e.toString(), e);
}
this.admin.disableTable(tableName);
this.admin.deleteTable(tableName);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
index 953fae0876..fb5febc4ba 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
@@ -37,8 +37,6 @@ import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus.Option;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -75,7 +73,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -86,7 +85,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
*/
@Category({LargeTests.class, ClientTests.class})
public class TestAdmin2 {
- private static final Log LOG = LogFactory.getLog(TestAdmin2.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestAdmin2.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private Admin admin;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
index 83ba24411b..525fa4c93f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
@@ -27,8 +27,6 @@ import java.util.function.Supplier;
import java.util.regex.Pattern;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -41,13 +39,15 @@ import org.junit.Rule;
import org.junit.rules.TestName;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Class to test AsyncAdmin.
*/
public abstract class TestAsyncAdminBase {
- protected static final Log LOG = LogFactory.getLog(TestAsyncAdminBase.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(TestAsyncAdminBase.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
protected static final byte[] FAMILY = Bytes.toBytes("testFamily");
protected static final byte[] FAMILY_0 = Bytes.toBytes("cf0");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBuilder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBuilder.java
index 05324aa8ef..8b3b18166e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBuilder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBuilder.java
@@ -31,8 +31,6 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -51,12 +49,14 @@ import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@RunWith(Parameterized.class)
@Category({ LargeTests.class, ClientTests.class })
public class TestAsyncAdminBuilder {
- private static final Log LOG = LogFactory.getLog(TestAsyncAdminBuilder.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestAsyncAdminBuilder.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static AsyncConnection ASYNC_CONN;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java
index dd3655ebae..ada8824ab8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java
@@ -64,7 +64,7 @@ public class TestAsyncNamespaceAdminApi extends TestAsyncAdminBase {
TEST_UTIL.getConfiguration().setInt(START_LOG_ERRORS_AFTER_COUNT_KEY, 0);
TEST_UTIL.startMiniCluster(1);
ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
- master = ((MiniHBaseCluster) TEST_UTIL.getHBaseCluster()).getMaster();
+ master = TEST_UTIL.getHBaseCluster().getMaster();
zkNamespaceManager = new ZKNamespaceManager(master.getZooKeeper());
zkNamespaceManager.start();
LOG.info("Done initializing cluster");
@@ -98,7 +98,7 @@ public class TestAsyncNamespaceAdminApi extends TestAsyncAdminBase {
try {
admin.deleteNamespace(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME_STR).join();
} catch (Exception exp) {
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
exceptionCaught = true;
} finally {
assertTrue(exceptionCaught);
@@ -107,7 +107,7 @@ public class TestAsyncNamespaceAdminApi extends TestAsyncAdminBase {
try {
admin.deleteNamespace(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR).join();
} catch (Exception exp) {
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
exceptionCaught = true;
} finally {
assertTrue(exceptionCaught);
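
Aside from logging, a few redundant casts are dropped along the way: HBaseTestingUtility.getHBaseCluster() is already typed as MiniHBaseCluster, so the explicit cast removed in this file's first hunk was a no-op. A hedged sketch of the simplified call (assuming a static TEST_UTIL field as in these tests):

    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.apache.hadoop.hbase.MiniHBaseCluster;
    import org.apache.hadoop.hbase.master.HMaster;

    class ClusterAccessSketch {
      private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

      HMaster master() {
        // getHBaseCluster() already returns MiniHBaseCluster, so no cast is needed.
        MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
        return cluster.getMaster();
      }
    }
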
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java
index c39a582c10..a3b160fb20 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java
@@ -18,6 +18,13 @@
package org.apache.hadoop.hbase.client;
+import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.util.Objects;
+import java.util.concurrent.TimeUnit;
+
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.quotas.QuotaCache;
@@ -36,12 +43,6 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
-import java.util.concurrent.TimeUnit;
-
-import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
@RunWith(Parameterized.class)
@Category({ ClientTests.class, MediumTests.class })
public class TestAsyncQuotaAdminApi extends TestAsyncAdminBase {
@@ -180,7 +181,7 @@ public class TestAsyncQuotaAdminApi extends TestAsyncAdminBase {
private int countResults(final QuotaFilter filter) throws Exception {
int count = 0;
for (QuotaSettings settings : admin.getQuota(filter).get()) {
- LOG.debug(settings);
+ LOG.debug(Objects.toString(settings));
count++;
}
return count;
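
For non-exception objects there is no Throwable overload to lean on, so the patch stringifies explicitly with Objects.toString(), which also tolerates null. An equivalent SLF4J idiom (not the one the patch chose) is the parameterized form, which defers rendering until the level is enabled; a sketch of both:

    import java.util.Objects;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ObjectLoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ObjectLoggingSketch.class);

      void dump(Object settings) {
        // What the patch does: convert explicitly, null-safe.
        LOG.debug(Objects.toString(settings));
        // Alternative: the placeholder is only rendered if DEBUG is enabled.
        LOG.debug("{}", settings);
      }
    }
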
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
index e6cffd6333..dcccfd168b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
@@ -418,7 +418,7 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
}
Thread.sleep(1000L);
} catch (Exception e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
assertEquals(count, 2);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
index 9388144db7..7d95a712e6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
@@ -34,8 +34,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -70,11 +68,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ LargeTests.class, ClientTests.class })
@SuppressWarnings("deprecation")
public class TestBlockEvictionFromClient {
- private static final Log LOG = LogFactory.getLog(TestBlockEvictionFromClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestBlockEvictionFromClient.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
static byte[][] ROWS = new byte[2][];
private static int NO_OF_THREADS = 3;
@@ -184,7 +184,7 @@ public class TestBlockEvictionFromClient {
// get the block cache and region
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
- HRegion region = (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName)
+ HRegion region = TEST_UTIL.getRSForFirstRegionInTable(tableName)
.getRegion(regionName);
HStore store = region.getStores().iterator().next();
CacheConfig cacheConf = store.getCacheConfig();
@@ -275,7 +275,7 @@ public class TestBlockEvictionFromClient {
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
HRegion region =
- (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
+ TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
HStore store = region.getStores().iterator().next();
CacheConfig cacheConf = store.getCacheConfig();
cacheConf.setCacheDataOnWrite(true);
@@ -334,7 +334,7 @@ public class TestBlockEvictionFromClient {
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
HRegion region =
- (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
+ TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
HStore store = region.getStores().iterator().next();
CacheConfig cacheConf = store.getCacheConfig();
cacheConf.setCacheDataOnWrite(true);
@@ -396,7 +396,7 @@ public class TestBlockEvictionFromClient {
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
HRegion region =
- (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
+ TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
BlockCache cache = setCacheProperties(region);
Put put = new Put(ROW);
put.addColumn(FAMILY, QUALIFIER, data);
@@ -489,7 +489,7 @@ public class TestBlockEvictionFromClient {
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
HRegion region =
- (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
+ TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
BlockCache cache = setCacheProperties(region);
Put put = new Put(ROW);
@@ -573,7 +573,7 @@ public class TestBlockEvictionFromClient {
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
HRegion region =
- (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
+ TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
HStore store = region.getStores().iterator().next();
CacheConfig cacheConf = store.getCacheConfig();
cacheConf.setEvictOnClose(true);
@@ -636,7 +636,7 @@ public class TestBlockEvictionFromClient {
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
HRegion region =
- (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
+ TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
HStore store = region.getStores().iterator().next();
CacheConfig cacheConf = store.getCacheConfig();
cacheConf.setCacheDataOnWrite(true);
@@ -719,7 +719,7 @@ public class TestBlockEvictionFromClient {
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
HRegion region =
- (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
+ TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
BlockCache cache = setCacheProperties(region);
Put put = new Put(ROW);
@@ -819,7 +819,7 @@ public class TestBlockEvictionFromClient {
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
HRegion region =
- (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
+ TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
HStore store = region.getStores().iterator().next();
CacheConfig cacheConf = store.getCacheConfig();
cacheConf.setCacheDataOnWrite(true);
@@ -885,7 +885,7 @@ public class TestBlockEvictionFromClient {
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
HRegion region =
- (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
+ TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
HStore store = region.getStores().iterator().next();
CacheConfig cacheConf = store.getCacheConfig();
cacheConf.setCacheDataOnWrite(true);
@@ -1003,7 +1003,7 @@ public class TestBlockEvictionFromClient {
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
HRegion region =
- (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
+ TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
HStore store = region.getStores().iterator().next();
CacheConfig cacheConf = store.getCacheConfig();
cacheConf.setCacheDataOnWrite(true);
@@ -1133,7 +1133,7 @@ public class TestBlockEvictionFromClient {
RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
HRegion region =
- (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
+ TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName);
HStore store = region.getStores().iterator().next();
CacheConfig cacheConf = store.getCacheConfig();
cacheConf.setCacheDataOnWrite(true);
@@ -1617,7 +1617,7 @@ public class TestBlockEvictionFromClient {
}
}
} catch (InterruptedException e1) {
- LOG.error(e1);
+ LOG.error(e1.toString(), e1);
}
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
index e92ba23453..cbead73d4f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.client;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -40,6 +38,8 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InterruptedIOException;
@@ -51,7 +51,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Category({MediumTests.class, ClientTests.class})
public class TestClientOperationInterrupt {
- private static final Log LOG = LogFactory.getLog(TestClientOperationInterrupt.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestClientOperationInterrupt.class);
private static HBaseTestingUtility util;
private static final TableName tableName = TableName.valueOf("test");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java
index 12c7faeb39..62eb31674f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java
@@ -24,8 +24,6 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
@@ -43,6 +41,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.apache.hadoop.hbase.client.MetricsConnection.CLIENT_SIDE_METRICS_ENABLED_KEY;
import static org.junit.Assert.assertEquals;
@@ -56,7 +56,7 @@ import static org.junit.Assert.assertTrue;
@Category(MediumTests.class)
public class TestClientPushback {
- private static final Log LOG = LogFactory.getLog(TestClientPushback.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestClientPushback.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final TableName tableName = TableName.valueOf("client-pushback");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
index c9686c2e7e..e225d556d9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
@@ -21,17 +21,11 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MiniHBaseCluster.MiniHBaseClusterRegionServer;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.CoordinatedStateManager;
-import org.apache.hadoop.hbase.ipc.AbstractRpcClient;
-import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
@@ -39,7 +33,6 @@ import org.apache.hadoop.hbase.regionserver.RSRpcServices;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
@@ -49,6 +42,8 @@ import org.junit.experimental.categories.Category;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the scenario where a HRegionServer#scan() call, while scanning, timeout at client side and
@@ -56,7 +51,7 @@ import org.junit.rules.TestName;
*/
@Category({MediumTests.class, ClientTests.class})
public class TestClientScannerRPCTimeout {
- private static final Log LOG = LogFactory.getLog(TestClientScannerRPCTimeout.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestClientScannerRPCTimeout.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final byte[] FAMILY = Bytes.toBytes("testFamily");
private static final byte[] QUALIFIER = Bytes.toBytes("testQualifier");
@@ -69,9 +64,6 @@ public class TestClientScannerRPCTimeout {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
- ((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
- ((Log4JLogger)AbstractRpcClient.LOG).getLogger().setLevel(Level.ALL);
- ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
Configuration conf = TEST_UTIL.getConfiguration();
// Don't report so often so easier to see other rpcs
conf.setInt("hbase.regionserver.msginterval", 3 * 10000);
@@ -146,6 +138,7 @@ public class TestClientScannerRPCTimeout {
super(conf);
}
+ @Override
protected RSRpcServices createRpcServices() throws IOException {
return new RSRpcServicesWithScanTimeout(this);
}
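This file also loses the setUpBeforeClass lines that cast commons-logging Logs to Log4JLogger to force ALL-level RPC logging; that cast cannot compile once the fields are SLF4J Loggers, so the patch simply drops the extra verbosity. If it were still wanted, one hypothetical substitute (assuming a log4j 1.2 backend, which this patch does not itself guarantee) is to address log4j directly:

    import org.apache.log4j.Level;
    import org.apache.log4j.LogManager;

    class VerboseRpcLoggingSketch {
      static void enable() {
        // Illustrative only; logger names follow the imports removed in this hunk.
        LogManager.getLogger("org.apache.hadoop.hbase.ipc.RpcServer").setLevel(Level.ALL);
        LogManager.getLogger("org.apache.hadoop.hbase.ipc.AbstractRpcClient").setLevel(Level.ALL);
      }
    }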
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java
index f191bea06c..96a123b65c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.client;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -40,13 +38,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test clone snapshots from the client
*/
@Category({LargeTests.class, ClientTests.class})
public class TestCloneSnapshotFromClient {
- private static final Log LOG = LogFactory.getLog(TestCloneSnapshotFromClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCloneSnapshotFromClient.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java
index 46aa72fc97..141b13f175 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.client;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -39,6 +37,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
@@ -60,7 +60,7 @@ public class TestDropTimeoutRequest {
@Rule
public TestName name = new TestName();
- private static final Log LOG = LogFactory.getLog(TestDropTimeoutRequest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDropTimeoutRequest.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final byte[] FAM_NAM = Bytes.toBytes("f");
private static final int RPC_RETRY = 5;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java
index 6b0359439d..ca4163ece5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java
@@ -24,8 +24,6 @@ import java.util.List;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -53,6 +51,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -61,7 +61,7 @@ import static org.junit.Assert.fail;
@Category({ MasterTests.class, MediumTests.class })
public class TestEnableTable {
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestEnableTable.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestEnableTable.class);
private static final byte[] FAMILYNAME = Bytes.toBytes("fam");
@Rule
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
index 154f6eb700..47516ec758 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
@@ -32,8 +32,6 @@ import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -58,10 +56,12 @@ import org.junit.Test;
import org.junit.Ignore;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MediumTests.class, ClientTests.class})
public class TestFastFail {
- private static final Log LOG = LogFactory.getLog(TestFastFail.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFastFail.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte[] FAMILY = Bytes.toBytes("testFamily");
private static final Random random = new Random();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index f4ed71c96c..c2a7f6b1ad 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -46,8 +46,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
@@ -111,6 +109,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Run tests that use the HBase clients; {@link Table}.
@@ -121,7 +121,7 @@ import org.junit.rules.TestName;
@SuppressWarnings ("deprecation")
public class TestFromClientSide {
// NOTE: Increment tests were moved to their own class, TestIncrementsFromClientSide.
- private static final Log LOG = LogFactory.getLog(TestFromClientSide.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFromClientSide.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte [] ROW = Bytes.toBytes("testRow");
private static byte [] FAMILY = Bytes.toBytes("testFamily");
@@ -1160,7 +1160,7 @@ public class TestFromClientSide {
// Null family (should NOT work)
try {
- TEST_UTIL.createTable(tableName, new byte[][]{(byte[])null});
+ TEST_UTIL.createTable(tableName, new byte[][]{null});
fail("Creating a table with a null family passed, should fail");
} catch(Exception e) {}
@@ -4766,7 +4766,7 @@ public class TestFromClientSide {
// the error happens in a thread, it won't fail the test,
// need to pass it to the caller for proper handling.
error.set(e);
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
return null;
@@ -5163,7 +5163,7 @@ public class TestFromClientSide {
// get the block cache and region
String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
- HRegion region = (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName)
+ HRegion region = TEST_UTIL.getRSForFirstRegionInTable(tableName)
.getRegion(regionName);
HStore store = region.getStores().iterator().next();
CacheConfig cacheConf = store.getCacheConfig();
@@ -5516,18 +5516,18 @@ public class TestFromClientSide {
// put the same row 4 times, with different values
Put p = new Put(row);
- p.addColumn(FAMILY, QUALIFIER, (long) 10, VALUE);
+ p.addColumn(FAMILY, QUALIFIER, 10, VALUE);
table.put(p);
p = new Put(row);
- p.addColumn(FAMILY, QUALIFIER, (long) 11, ArrayUtils.add(VALUE, (byte) 2));
+ p.addColumn(FAMILY, QUALIFIER, 11, ArrayUtils.add(VALUE, (byte) 2));
table.put(p);
p = new Put(row);
- p.addColumn(FAMILY, QUALIFIER, (long) 12, ArrayUtils.add(VALUE, (byte) 3));
+ p.addColumn(FAMILY, QUALIFIER, 12, ArrayUtils.add(VALUE, (byte) 3));
table.put(p);
p = new Put(row);
- p.addColumn(FAMILY, QUALIFIER, (long) 13, ArrayUtils.add(VALUE, (byte) 4));
+ p.addColumn(FAMILY, QUALIFIER, 13, ArrayUtils.add(VALUE, (byte) 4));
table.put(p);
int versions = 4;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
index e5d5324c99..2d67b3e359 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
@@ -31,8 +31,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -74,10 +72,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({LargeTests.class, ClientTests.class})
public class TestFromClientSide3 {
- private static final Log LOG = LogFactory.getLog(TestFromClientSide3.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFromClientSide3.class);
private final static HBaseTestingUtility TEST_UTIL
= new HBaseTestingUtility();
private static byte[] FAMILY = Bytes.toBytes("testFamily");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java
index fb9fb37a90..712dc55bf3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java
@@ -24,8 +24,6 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -59,13 +57,15 @@ import org.mockito.Matchers;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.ServiceException;
@Category({SmallTests.class, ClientTests.class})
public class TestHBaseAdminNoCluster {
- private static final Log LOG = LogFactory.getLog(TestHBaseAdminNoCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHBaseAdminNoCluster.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
index 7e886920ab..104b49949c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
@@ -41,8 +41,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
@@ -87,6 +85,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -98,7 +98,7 @@ public class TestHCM {
.withTimeout(this.getClass())
.withLookingForStuckThread(true)
.build();
- private static final Log LOG = LogFactory.getLog(TestHCM.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHCM.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final TableName TABLE_NAME =
TableName.valueOf("test");
@@ -747,7 +747,7 @@ public class TestHCM {
}
} catch (Throwable t) {
failed.set(t);
- LOG.error(t);
+ LOG.error(t.toString(), t);
}
step.set(3);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
index 5c47de0c46..f59a0e9504 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -38,10 +36,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({LargeTests.class, ClientTests.class})
public class TestHTableMultiplexer {
- private static final Log LOG = LogFactory.getLog(TestHTableMultiplexer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHTableMultiplexer.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte[] FAMILY = Bytes.toBytes("testFamily");
private static byte[] QUALIFIER = Bytes.toBytes("testQualifier");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
index 99571cc83a..657dfd8ab7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
@@ -19,8 +19,6 @@
*/
package org.apache.hadoop.hbase.client;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -38,13 +36,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@Category({ LargeTests.class, ClientTests.class })
public class TestHTableMultiplexerFlushCache {
- private static final Log LOG = LogFactory.getLog(TestHTableMultiplexerFlushCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHTableMultiplexerFlushCache.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte[] FAMILY = Bytes.toBytes("testFamily");
private static byte[] QUALIFIER1 = Bytes.toBytes("testQualifier_1");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java
index 9d88dae718..df39e0988a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java
@@ -18,21 +18,16 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
+import static org.mockito.ArgumentMatchers.contains;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
+import java.lang.reflect.Field;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -42,21 +37,19 @@ import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.apache.log4j.spi.LoggingEvent;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
@Category({LargeTests.class, ClientTests.class})
public class TestIllegalTableDescriptor {
// NOTE: Increment tests were moved to their own class, TestIncrementsFromClientSide.
- private static final Log LOG = LogFactory.getLog(TestFromClientSide.class);
+ private static final Logger masterLogger;
+
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte [] FAMILY = Bytes.toBytes("testFamily");
@@ -64,8 +57,17 @@ public class TestIllegalTableDescriptor {
@Rule
public TestName name = new TestName();
+ static {
+ masterLogger = mock(Logger.class);
+ }
+
@BeforeClass
public static void setUpBeforeClass() throws Exception {
+ // replacing HMaster.LOG with our mock logger for verifying logging
+ Field field = HMaster.class.getDeclaredField("LOG");
+ field.setAccessible(true);
+ field.set(null, masterLogger);
+
Configuration conf = TEST_UTIL.getConfiguration();
conf.setBoolean("hbase.table.sanity.checks", true); // enable for below tests
// We need more than one region server in this test
@@ -165,20 +167,12 @@ public class TestIllegalTableDescriptor {
htd.setMemStoreFlushSize(0);
// Check that logs warn on invalid table but allow it.
- ListAppender listAppender = new ListAppender();
- Logger log = Logger.getLogger(HMaster.class);
- log.addAppender(listAppender);
- log.setLevel(Level.WARN);
-
htd.setConfiguration("hbase.table.sanity.checks", Boolean.FALSE.toString());
checkTableIsLegal(htd);
- assertFalse(listAppender.getMessages().isEmpty());
- assertTrue(listAppender.getMessages().get(0).startsWith("MEMSTORE_FLUSHSIZE for table "
+ verify(masterLogger).warn(contains("MEMSTORE_FLUSHSIZE for table "
+ "descriptor or \"hbase.hregion.memstore.flush.size\" (0) is too small, which might "
+ "cause very frequent flushing."));
-
- log.removeAppender(listAppender);
}
private void checkTableIsLegal(HTableDescriptor htd) throws IOException {
@@ -198,26 +192,4 @@ public class TestIllegalTableDescriptor {
}
assertFalse(admin.tableExists(htd.getTableName()));
}
-
- private static class ListAppender extends AppenderSkeleton {
- private final List messages = new ArrayList<>();
-
- @Override
- protected void append(LoggingEvent event) {
- messages.add(event.getMessage().toString());
- }
-
- @Override
- public void close() {
- }
-
- @Override
- public boolean requiresLayout() {
- return false;
- }
-
- public List getMessages() {
- return messages;
- }
- }
}
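The TestIllegalTableDescriptor rewrite is the one non-mechanical change in this stretch: instead of registering a log4j AppenderSkeleton and scanning its captured messages, the test injects a Mockito mock into HMaster's static LOG field and verifies the warning call directly. A minimal, self-contained sketch of that verification style (hypothetical class and message, not the actual test):

    import static org.mockito.ArgumentMatchers.contains;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;

    import org.slf4j.Logger;

    class MockLoggerSketch {
      public static void main(String[] args) {
        Logger log = mock(Logger.class);  // stand-in for the injected HMaster.LOG
        log.warn("MEMSTORE_FLUSHSIZE for table descriptor (0) is too small");  // the code under test would emit this
        verify(log).warn(contains("MEMSTORE_FLUSHSIZE"));  // replaces the old ListAppender message scan
      }
    }

Note that the reflective field swap in the hunk above only succeeds if the target field is not final (or its final modifier is cleared first).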
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java
index 14f5d6752c..d69d01eeb7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java
@@ -30,8 +30,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -51,6 +49,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Run Increment tests that use the HBase clients; {@link HTable}.
@@ -63,7 +63,7 @@ import org.junit.rules.TestName;
*/
@Category(LargeTests.class)
public class TestIncrementsFromClientSide {
- final Log LOG = LogFactory.getLog(getClass());
+ final Logger LOG = LoggerFactory.getLogger(getClass());
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte [] ROW = Bytes.toBytes("testRow");
private static byte [] FAMILY = Bytes.toBytes("testFamily");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java
index 87d8a6e9ac..661d3f157c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.util.Arrays;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CompatibilityFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -43,12 +41,14 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(LargeTests.class)
public class TestLeaseRenewal {
public MetricsAssertHelper HELPER = CompatibilityFactory.getInstance(MetricsAssertHelper.class);
- final Log LOG = LogFactory.getLog(getClass());
+ final Logger LOG = LoggerFactory.getLogger(getClass());
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte[] FAMILY = Bytes.toBytes("testFamily");
private static final byte[] ANOTHERROW = Bytes.toBytes("anotherrow");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
index 5660a017ca..62a42ce447 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
@@ -30,8 +30,6 @@ import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.ExecutorService;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -67,6 +65,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.Nullable;
@@ -79,7 +79,7 @@ public class TestMetaWithReplicas {
withTimeout(this.getClass()).
withLookingForStuckThread(true).
build();
- private static final Log LOG = LogFactory.getLog(TestMetaWithReplicas.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMetaWithReplicas.class);
private final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@Rule
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotCloneIndependence.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotCloneIndependence.java
index 3bd8f2e238..e22391ef3e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotCloneIndependence.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotCloneIndependence.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.client;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.TableName;
@@ -32,13 +30,15 @@ import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test to verify that the cloned table is independent of the table from which it was cloned
*/
@Category(LargeTests.class)
public class TestMobSnapshotCloneIndependence extends TestSnapshotCloneIndependence {
- private static final Log LOG = LogFactory.getLog(TestMobSnapshotCloneIndependence.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMobSnapshotCloneIndependence.class);
@ClassRule
public static final TestRule timeout =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java
index 268bc14ea9..827c932f58 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.client;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.snapshot.MobSnapshotTestingUtils;
@@ -26,6 +24,8 @@ import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.BeforeClass;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -35,7 +35,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({LargeTests.class, ClientTests.class})
public class TestMobSnapshotFromClient extends TestSnapshotFromClient {
- private static final Log LOG = LogFactory.getLog(TestMobSnapshotFromClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMobSnapshotFromClient.class);
/**
* Setup the config for the cluster
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
index cfa7f37358..14ab6c903b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
@@ -30,8 +30,6 @@ import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -53,10 +51,12 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MediumTests.class, FlakeyTests.class})
public class TestMultiParallel {
- private static final Log LOG = LogFactory.getLog(TestMultiParallel.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMultiParallel.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final byte[] VALUE = Bytes.toBytes("value");
@@ -111,7 +111,7 @@ public class TestMultiParallel {
// Don't use integer as a multiple, so that we have a number of keys that is
// not a multiple of the number of regions
- int numKeys = (int) ((float) starterKeys.length * 10.33F);
+ int numKeys = (int) (starterKeys.length * 10.33F);
List keys = new ArrayList<>();
for (int i = 0; i < numKeys; i++) {
@@ -232,7 +232,7 @@ public class TestMultiParallel {
table.batch(actions, r);
fail();
} catch (RetriesExhaustedWithDetailsException ex) {
- LOG.debug(ex);
+ LOG.debug(ex.toString(), ex);
// good!
assertFalse(ex.mayHaveClusterIssues());
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
index 807d59afe6..ee39a83c37 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
@@ -25,8 +25,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -39,6 +37,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Run tests related to {@link org.apache.hadoop.hbase.filter.TimestampsFilter} using HBase client APIs.
@@ -47,7 +47,7 @@ import org.junit.rules.TestName;
*/
@Category({LargeTests.class, ClientTests.class})
public class TestMultipleTimestamps {
- private static final Log LOG = LogFactory.getLog(TestMultipleTimestamps.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMultipleTimestamps.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@Rule
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
index 22d0e8e1de..ba4a8c2977 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
@@ -30,8 +30,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -66,10 +64,12 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MediumTests.class, ClientTests.class})
public class TestReplicaWithCluster {
- private static final Log LOG = LogFactory.getLog(TestReplicaWithCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicaWithCluster.class);
private static final int NB_SERVERS = 3;
private static final byte[] row = TestReplicaWithCluster.class.getName().getBytes();
@@ -115,7 +115,7 @@ public class TestReplicaWithCluster {
}
}
} catch (InterruptedException e1) {
- LOG.error(e1);
+ LOG.error(e1.toString(), e1);
}
} else {
LOG.info("We're not the primary replicas.");
@@ -555,7 +555,7 @@ public class TestReplicaWithCluster {
try {
Thread.sleep(2 * REFRESH_PERIOD);
} catch (InterruptedException e1) {
- LOG.error(e1);
+ LOG.error(e1.toString(), e1);
}
// But if we ask for stale we will get it
@@ -590,7 +590,7 @@ public class TestReplicaWithCluster {
try {
Thread.sleep(2 * REFRESH_PERIOD);
} catch (InterruptedException e1) {
- LOG.error(e1);
+ LOG.error(e1.toString(), e1);
}
// But if we ask for stale we will get it
@@ -636,7 +636,7 @@ public class TestReplicaWithCluster {
try {
Thread.sleep(2 * REFRESH_PERIOD);
} catch (InterruptedException e1) {
- LOG.error(e1);
+ LOG.error(e1.toString(), e1);
}
try {
@@ -768,7 +768,7 @@ public class TestReplicaWithCluster {
try {
Thread.sleep(2 * REFRESH_PERIOD);
} catch (InterruptedException e1) {
- LOG.error(e1);
+ LOG.error(e1.toString(), e1);
}
// Simulating the RS down
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
index a34b651352..a06055ded2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
@@ -35,9 +35,7 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import com.codahale.metrics.Counter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -62,7 +60,6 @@ import org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.AfterClass;
@@ -71,6 +68,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests for region replicas. Sad that we cannot isolate these without bringing up a whole
@@ -79,11 +78,7 @@ import org.junit.experimental.categories.Category;
@Category({MediumTests.class, ClientTests.class})
@SuppressWarnings("deprecation")
public class TestReplicasClient {
- private static final Log LOG = LogFactory.getLog(TestReplicasClient.class);
-
- static {
- ((Log4JLogger)RpcRetryingCallerImpl.LOG).getLogger().setLevel(Level.ALL);
- }
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicasClient.class);
private static final int NB_SERVERS = 1;
private static Table table = null;
@@ -161,7 +156,7 @@ public class TestReplicasClient {
}
}
} catch (InterruptedException e1) {
- LOG.error(e1);
+ LOG.error(e1.toString(), e1);
}
} else {
LOG.info("We're not the primary replicas.");
@@ -175,7 +170,7 @@ public class TestReplicasClient {
}
}
} catch (InterruptedException e1) {
- LOG.error(e1);
+ LOG.error(e1.toString(), e1);
}
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java
index e87602e591..33352985a6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java
@@ -29,8 +29,6 @@ import java.util.NoSuchElementException;
import junit.framework.TestCase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellScanner;
@@ -40,11 +38,13 @@ import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({SmallTests.class, ClientTests.class})
public class TestResult extends TestCase {
- private static final Log LOG = LogFactory.getLog(TestResult.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(TestResult.class.getName());
static KeyValue[] genKVs(final byte[] row, final byte[] family,
final byte[] value,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
index 14214d5d2c..d2191e18e8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.client;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -37,6 +35,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test various scanner timeout issues.
@@ -47,7 +47,7 @@ public class TestScannerTimeout {
private final static HBaseTestingUtility
TEST_UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestScannerTimeout.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestScannerTimeout.class);
private final static byte[] SOME_BYTES = Bytes.toBytes("f");
private final static TableName TABLE_NAME = TableName.valueOf("t");
private final static int NB_ROWS = 10;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
index 43be5731d0..f0060e4d88 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
@@ -29,8 +29,6 @@ import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.stream.IntStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -57,13 +55,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A client-side test, mostly testing scanners with various parameters.
*/
@Category({MediumTests.class, ClientTests.class})
public class TestScannersFromClientSide {
- private static final Log LOG = LogFactory.getLog(TestScannersFromClientSide.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestScannersFromClientSide.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte [] ROW = Bytes.toBytes("testRow");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java
index 4ef354998e..75a3b8e6bd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java
@@ -25,8 +25,6 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -38,12 +36,13 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
@Category(LargeTests.class)
public class TestSizeFailures {
- private static final Log LOG = LogFactory.getLog(TestSizeFailures.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSizeFailures.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte [] FAMILY = Bytes.toBytes("testFamily");
protected static int SLAVES = 1;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java
index fd9f614151..b050397f57 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.client;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -29,12 +27,14 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
@Category(MediumTests.class)
public class TestSmallReversedScanner {
- public static final Log LOG = LogFactory.getLog(TestSmallReversedScanner.class);
+ public static final Logger LOG = LoggerFactory.getLogger(TestSmallReversedScanner.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final TableName TABLE_NAME = TableName.valueOf("testReversedSmall");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
index 5688617fec..da422f3f5d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.client;
import java.util.List;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -51,13 +49,15 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test to verify that the cloned table is independent of the table from which it was cloned
*/
@Category({LargeTests.class, ClientTests.class})
public class TestSnapshotCloneIndependence {
- private static final Log LOG = LogFactory.getLog(TestSnapshotCloneIndependence.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotCloneIndependence.class);
@ClassRule
public static final TestRule timeout =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
index baba195404..0e6f2e9778 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
@@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -56,6 +54,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test create/using/deleting snapshots from the client
@@ -64,7 +64,7 @@ import org.junit.rules.TestName;
*/
@Category({LargeTests.class, ClientTests.class})
public class TestSnapshotFromClient {
- private static final Log LOG = LogFactory.getLog(TestSnapshotFromClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotFromClient.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
protected static final int NUM_RS = 2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
index 99c4340a14..49c656067b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
@@ -25,8 +25,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -49,13 +47,15 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test class to verify that metadata is consistent before and after a snapshot attempt.
*/
@Category({MediumTests.class, ClientTests.class})
public class TestSnapshotMetadata {
- private static final Log LOG = LogFactory.getLog(TestSnapshotMetadata.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotMetadata.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final int NUM_RS = 2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java
index ba07755c13..cd828bffd1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java
@@ -32,8 +32,6 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -64,7 +62,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
@@ -72,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
@Category({ClientTests.class, MediumTests.class})
public class TestTableFavoredNodes {
- private static final Log LOG = LogFactory.getLog(TestTableFavoredNodes.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableFavoredNodes.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static int WAIT_TIMEOUT = 60000;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java
index 535a34d703..56a0792baf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -42,11 +40,13 @@ import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({LargeTests.class, ClientTests.class})
public class TestTableSnapshotScanner {
- private static final Log LOG = LogFactory.getLog(TestTableSnapshotScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableSnapshotScanner.class);
private final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final int NUM_REGION_SERVERS = 2;
private static final byte[][] FAMILIES = {Bytes.toBytes("f1"), Bytes.toBytes("f2")};
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
index 0a1fafe398..89af5de61c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
@@ -25,8 +25,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.TimestampsFilter;
@@ -41,6 +39,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Run tests related to {@link TimestampsFilter} using HBase client APIs.
@@ -49,7 +49,7 @@ import org.junit.rules.TestName;
*/
@Category({MediumTests.class, ClientTests.class})
public class TestTimestampsFilter {
- private static final Log LOG = LogFactory.getLog(TestTimestampsFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTimestampsFilter.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@Rule
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java
index d54cb53a4b..6511a42ce3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java
@@ -26,8 +26,6 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.ServerName;
@@ -35,10 +33,12 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MediumTests.class})
public class TestUpdateConfiguration {
- private static final Log LOG = LogFactory.getLog(TestUpdateConfiguration.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestUpdateConfiguration.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@BeforeClass
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java
index 4ac3654203..306e1dc73f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java
@@ -33,8 +33,6 @@ import static org.mockito.Mockito.when;
import java.util.Random;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -55,10 +53,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ClientTests.class, SmallTests.class})
public class TestEntityLocks {
- private static final Log LOG = LogFactory.getLog(TestEntityLocks.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestEntityLocks.class);
private final Configuration conf = HBaseConfiguration.create();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
index 83a2e12dd5..67c635b9f7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
@@ -27,8 +27,6 @@ import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -50,6 +48,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -64,8 +64,8 @@ import static org.junit.Assert.fail;
@Category({MediumTests.class, ClientTests.class})
public class TestReplicationAdmin {
- private static final Log LOG =
- LogFactory.getLog(TestReplicationAdmin.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestReplicationAdmin.class);
private final static HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
index c035f29a46..bba27fe6c1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
@@ -24,9 +24,9 @@ import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.KeyValue;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.Bytes;
public class CodecPerformance {
/** @deprecated LOG variable would be made private. since 1.2, remove in 3.0 */
@Deprecated
- public static final Log LOG = LogFactory.getLog(CodecPerformance.class);
+ public static final Logger LOG = LoggerFactory.getLogger(CodecPerformance.class);
static Cell [] getCells(final int howMany) {
Cell [] cells = new Cell[howMany];
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java
index 4a42b68768..24aa192341 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java
@@ -27,8 +27,6 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
@@ -37,13 +35,14 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream;
import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream;
@Category({MiscTests.class, SmallTests.class})
public class TestCellMessageCodec {
- private static final Log LOG = LogFactory.getLog(TestCellMessageCodec.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCellMessageCodec.class);
@Test
public void testEmptyWorks() throws IOException {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java
index ab4ebc58b4..1f8dbc40fc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java
@@ -21,17 +21,17 @@ package org.apache.hadoop.hbase.conf;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({SmallTests.class, ClientTests.class})
public class TestConfigurationManager {
- private static final Log LOG = LogFactory.getLog(TestConfigurationManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestConfigurationManager.class);
class DummyConfigurationObserver implements ConfigurationObserver {
private boolean notifiedOnChange = false;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
index c2a16af7ba..35bcd77b15 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.fail;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -42,14 +40,16 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Do the complex testing of constraints against a minicluster
*/
@Category({MiscTests.class, MediumTests.class})
public class TestConstraint {
- private static final Log LOG = LogFactory
- .getLog(TestConstraint.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TestConstraint.class);
private static HBaseTestingUtility util;
private static final TableName tableName = TableName.valueOf("test");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java
index 9f58fc4a47..f3d90f6d61 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java
@@ -24,8 +24,6 @@ import java.util.Arrays;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Class for testing WALObserver coprocessor. It will monitor WAL writing and restoring, and modify
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.wal.WALKey;
public class SampleRegionWALCoprocessor implements WALCoprocessor, RegionCoprocessor,
WALObserver, RegionObserver {
- private static final Log LOG = LogFactory.getLog(SampleRegionWALCoprocessor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SampleRegionWALCoprocessor.class);
private byte[] tableName;
private byte[] row;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
index 61b4808515..44216ec72c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
@@ -35,8 +35,6 @@ import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -73,11 +71,13 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({CoprocessorTests.class, SmallTests.class})
public class TestCoprocessorInterface {
@Rule public TestName name = new TestName();
- private static final Log LOG = LogFactory.getLog(TestCoprocessorInterface.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorInterface.class);
private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
static final Path DIR = TEST_UTIL.getDataTestDir();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java
index b2c106255d..3789a2a3e6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java
@@ -25,8 +25,7 @@ import java.util.Optional;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -71,6 +70,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -84,7 +85,7 @@ import static org.junit.Assert.assertTrue;
@Category({CoprocessorTests.class, MediumTests.class})
public class TestCoprocessorMetrics {
- private static final Log LOG = LogFactory.getLog(TestCoprocessorMetrics.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorMetrics.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final byte[] foo = Bytes.toBytes("foo");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java
index 1d8acdfe3a..f96da7009f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java
@@ -20,8 +20,7 @@
package org.apache.hadoop.hbase.coprocessor;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.fs.Path;
@@ -33,6 +32,9 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import static org.junit.Assert.assertTrue;
@@ -42,7 +44,7 @@ import static org.junit.Assert.assertTrue;
*/
@Category({CoprocessorTests.class, MediumTests.class})
public class TestCoprocessorStop {
- private static final Log LOG = LogFactory.getLog(TestCoprocessorStop.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorStop.class);
private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final String MASTER_FILE =
"master" + System.currentTimeMillis();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
index d24711a6fd..0309eaa512 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
@@ -30,8 +30,6 @@ import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -70,7 +68,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
@@ -83,7 +82,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNa
*/
@Category({CoprocessorTests.class, MediumTests.class})
public class TestMasterObserver {
- private static final Log LOG = LogFactory.getLog(TestMasterObserver.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterObserver.class);
public static CountDownLatch tableCreationLatch = new CountDownLatch(1);
public static CountDownLatch tableDeletionLatch = new CountDownLatch(1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemStoreSizeWithSlowCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemStoreSizeWithSlowCoprocessor.java
index 30b3d71a85..877265d29b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemStoreSizeWithSlowCoprocessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemStoreSizeWithSlowCoprocessor.java
@@ -12,8 +12,6 @@ package org.apache.hadoop.hbase.coprocessor;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -31,6 +29,8 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test that verifies we do not have memstore size negative when a postPut/Delete hook is
@@ -39,8 +39,9 @@ import org.junit.experimental.categories.Category;
*/
@Category(LargeTests.class)
public class TestNegativeMemStoreSizeWithSlowCoprocessor {
-  static final Log LOG = LogFactory.getLog(TestNegativeMemStoreSizeWithSlowCoprocessor.class);
+  static final Logger LOG =
+      LoggerFactory.getLogger(TestNegativeMemStoreSizeWithSlowCoprocessor.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final byte[] tableName = Bytes.toBytes("test_table");
private static final byte[] family = Bytes.toBytes("f");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java
index 3e1621cf55..07a1fa99f4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java
@@ -23,8 +23,6 @@ import java.util.Arrays;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -53,6 +51,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@@ -60,8 +60,8 @@ import static org.junit.Assert.assertNotNull;
@Category(MediumTests.class)
public class TestRegionObserverForAddingMutationsFromCoprocessors {
- private static final Log LOG
- = LogFactory.getLog(TestRegionObserverForAddingMutationsFromCoprocessors.class);
+ private static final Logger LOG
+ = LoggerFactory.getLogger(TestRegionObserverForAddingMutationsFromCoprocessors.class);
private static HBaseTestingUtility util;
private static final byte[] dummy = Bytes.toBytes("dummy");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
index b55d8019d0..5f87d7ded1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
@@ -30,8 +30,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -90,12 +88,13 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@Category({ CoprocessorTests.class, MediumTests.class })
public class TestRegionObserverInterface {
- private static final Log LOG = LogFactory.getLog(TestRegionObserverInterface.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionObserverInterface.class);
@Rule
public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
index aea09bbd14..ef0c3eb05e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests unhandled exceptions thrown by coprocessors running on a regionserver..
@@ -52,7 +52,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({CoprocessorTests.class, MediumTests.class})
public class TestRegionServerCoprocessorExceptionWithAbort {
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
TestRegionServerCoprocessorExceptionWithAbort.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final TableName TABLE_NAME = TableName.valueOf("observed_table");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
index 0dd2c8cd8d..86a0d391f1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
@@ -32,8 +32,6 @@ import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -74,6 +72,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests invocation of the
@@ -82,7 +82,7 @@ import org.junit.rules.TestName;
*/
@Category({CoprocessorTests.class, MediumTests.class})
public class TestWALObserver {
- private static final Log LOG = LogFactory.getLog(TestWALObserver.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWALObserver.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte[] TEST_TABLE = Bytes.toBytes("observedTable");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
index 650e4d6212..8ec1a44f09 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
@@ -20,13 +20,13 @@ package org.apache.hadoop.hbase.errorhandling;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test that we propagate errors through an dispatcher exactly once via different failure
@@ -34,7 +34,7 @@ import org.mockito.Mockito;
*/
@Category({MasterTests.class, SmallTests.class})
public class TestForeignExceptionDispatcher {
- private static final Log LOG = LogFactory.getLog(TestForeignExceptionDispatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestForeignExceptionDispatcher.class);
/**
* Exception thrown from the test
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java
index 37af804cb6..27bc6e1c7a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.hbase.errorhandling;
import static org.junit.Assert.fail;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the {@link TimeoutExceptionInjector} to ensure we fulfill contracts
@@ -33,7 +33,7 @@ import org.mockito.Mockito;
@Category({MasterTests.class, SmallTests.class})
public class TestTimeoutExceptionInjector {
- private static final Log LOG = LogFactory.getLog(TestTimeoutExceptionInjector.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTimeoutExceptionInjector.class);
/**
* Test that a manually triggered timer fires an exception.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
index d3de539389..b0b17f9718 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
@@ -25,8 +25,6 @@ import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.Waiter.Predicate;
@@ -36,12 +34,14 @@ import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.mockito.Mockito.*;
@Category({MiscTests.class, SmallTests.class})
public class TestExecutorService {
- private static final Log LOG = LogFactory.getLog(TestExecutorService.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestExecutorService.class);
@Test
public void testExecutorService() throws Exception {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java
index 7041c92da8..ac7928f931 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java
@@ -27,7 +27,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -38,14 +37,10 @@ import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.ScannerCallable;
import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.ipc.AbstractRpcClient;
-import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.testclassification.FilterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.experimental.categories.Category;
@@ -112,9 +107,6 @@ public class FilterTestingCluster {
@BeforeClass
public static void setUp() throws Exception {
- ((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
- ((Log4JLogger)AbstractRpcClient.LOG).getLogger().setLevel(Level.ALL);
- ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
TEST_UTIL.startMiniCluster(1);
initialize(TEST_UTIL.getConfiguration());
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
index f03a4f0e9d..7bcce946e0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
@@ -26,8 +26,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
@@ -46,6 +44,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
class StringRange {
@@ -124,7 +124,7 @@ public class TestColumnRangeFilter {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestColumnRangeFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestColumnRangeFilter.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
index d84fbe9481..ae90c63b71 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
@@ -28,8 +28,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CompareOperator;
@@ -52,10 +50,12 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({FilterTests.class, SmallTests.class})
public class TestDependentColumnFilter {
- private static final Log LOG = LogFactory.getLog(TestDependentColumnFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDependentColumnFilter.class);
private static final byte[][] ROWS = {
Bytes.toBytes("test1"),Bytes.toBytes("test2")
};
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index b4d1935b07..b28c23c8bc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -28,8 +28,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
@@ -63,13 +61,15 @@ import org.junit.experimental.categories.Category;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test filters at the HRegion doorstep.
*/
@Category({FilterTests.class, SmallTests.class})
public class TestFilter {
- private final static Log LOG = LogFactory.getLog(TestFilter.class);
+ private final static Logger LOG = LoggerFactory.getLogger(TestFilter.class);
private HRegion region;
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
index 2a13ac8a66..59c6de2988 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.filter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
@@ -34,6 +32,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests filter Lists in ways that rely on a MiniCluster. Where possible, favor tests in
@@ -42,7 +42,7 @@ import org.junit.rules.TestName;
@Category({ MediumTests.class, FilterTests.class })
public class TestFilterListOnMini {
- private static final Log LOG = LogFactory.getLog(TestFilterListOnMini.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFilterListOnMini.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@Rule
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
index 39abc95f06..1f1e919feb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
@@ -23,8 +23,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -44,6 +42,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/*
* This test is for the optimization added in HBASE-15243.
@@ -53,7 +53,8 @@ import org.junit.rules.TestName;
public class TestFilterListOrOperatorWithBlkCnt {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestFilterListOrOperatorWithBlkCnt.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestFilterListOrOperatorWithBlkCnt.class);
private byte[] family = Bytes.toBytes("family");
private byte[] qf = Bytes.toBytes("qf");
private byte[] value = Bytes.toBytes("val");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
index 79dc36f688..29845c960d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
@@ -27,8 +27,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.TableName;
@@ -43,14 +41,16 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test if Filter is incompatible with scan-limits
*/
@Category({FilterTests.class, MediumTests.class})
public class TestFilterWithScanLimits extends FilterTestingCluster {
- private static final Log LOG = LogFactory
- .getLog(TestFilterWithScanLimits.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TestFilterWithScanLimits.class);
private static final TableName tableName = TableName.valueOf("scanWithLimit");
private static final String columnFamily = "f1";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
index e779706ebb..4556776961 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
@@ -23,8 +23,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -55,6 +53,8 @@ import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test if the FilterWrapper retains the same semantics defined in the
@@ -62,7 +62,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({FilterTests.class, MediumTests.class})
public class TestFilterWrapper {
- private static final Log LOG = LogFactory.getLog(TestFilterWrapper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFilterWrapper.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Configuration conf = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
index 36cf068395..a8c8afc7eb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
@@ -25,8 +25,6 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -51,13 +49,15 @@ import org.junit.experimental.categories.Category;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
*/
@Category({FilterTests.class, MediumTests.class})
public class TestFuzzyRowAndColumnRangeFilter {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestFuzzyRowAndColumnRangeFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFuzzyRowAndColumnRangeFilter.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
index b043e073ac..673857804b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.filter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -48,6 +46,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteBuffer;
@@ -63,7 +63,7 @@ import static org.junit.Assert.assertEquals;
public class TestFuzzyRowFilterEndToEnd {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static byte fuzzyValue = (byte) 63;
- private static final Log LOG = LogFactory.getLog(TestFuzzyRowFilterEndToEnd.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFuzzyRowFilterEndToEnd.class);
private static int firstPartCardinality = 50;
private static int secondPartCardinality = 50;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
index a5d04d2133..ff9db7572c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
@@ -24,8 +24,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -46,12 +44,14 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(MediumTests.class)
public class TestMultiRowRangeFilter {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestMultiRowRangeFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMultiRowRangeFilter.class);
private byte[] family = Bytes.toBytes("family");
private byte[] qf = Bytes.toBytes("qf");
private byte[] value = Bytes.toBytes("val");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
index db12989f5c..495f63fd89 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
@@ -19,8 +19,6 @@
*/
package org.apache.hadoop.hbase.filter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.commons.codec.binary.Hex;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
@@ -35,6 +33,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
@@ -46,8 +46,8 @@ import java.util.List;
*/
@Category({FilterTests.class, MediumTests.class})
public class TestScanRowPrefix extends FilterTestingCluster {
- private static final Log LOG = LogFactory
- .getLog(TestScanRowPrefix.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TestScanRowPrefix.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
index a6011ad79e..b24d30bdce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
@@ -28,9 +28,6 @@ import java.net.ServerSocket;
import java.util.List;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -64,7 +61,6 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.ipc.RemoteException;
-import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@@ -72,18 +68,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests for the hdfs fix from HBASE-6435.
*/
@Category({MiscTests.class, LargeTests.class})
public class TestBlockReorder {
- private static final Log LOG = LogFactory.getLog(TestBlockReorder.class);
-
- static {
- ((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL);
- ((Log4JLogger) HFileSystem.LOG).getLogger().setLevel(Level.ALL);
- }
+ private static final Logger LOG = LoggerFactory.getLogger(TestBlockReorder.class);
private Configuration conf;
private MiniDFSCluster cluster;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
index da45fdadb7..94df090049 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
@@ -40,8 +40,6 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantReadWriteLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
@@ -70,6 +68,8 @@ import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Testing the sizing that HeapSize offers and compares to the size given by
@@ -77,7 +77,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({IOTests.class, SmallTests.class})
public class TestHeapSize {
- private static final Log LOG = LogFactory.getLog(TestHeapSize.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHeapSize.class);
// List of classes implementing HeapSize
// BatchOperation, BatchUpdate, BlockIndex, Entry, Entry, HStoreKey
// KeyValue, LruBlockCache, Put, WALKey
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java
index 6c19037465..75bd7ff7dc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java
@@ -36,8 +36,6 @@ import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ThreadLocalRandom;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -54,7 +52,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel;
import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop;
import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
@@ -64,7 +63,7 @@ import org.apache.hadoop.hbase.shaded.io.netty.channel.socket.nio.NioSocketChannel;
@Category({ MiscTests.class, MediumTests.class })
public class TestFanOutOneBlockAsyncDFSOutput {
- private static final Log LOG = LogFactory.getLog(TestFanOutOneBlockAsyncDFSOutput.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFanOutOneBlockAsyncDFSOutput.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
index 1901dcea7c..b7b0998f9b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
@@ -16,8 +16,6 @@
*/
package org.apache.hadoop.hbase.io.encoding;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -43,6 +41,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
@@ -58,7 +58,7 @@ import static org.junit.Assert.assertTrue;
*/
@Category({IOTests.class, LargeTests.class})
public class TestChangingEncoding {
- private static final Log LOG = LogFactory.getLog(TestChangingEncoding.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestChangingEncoding.class);
static final String CF = "EncodingTestCF";
static final byte[] CF_BYTES = Bytes.toBytes(CF);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
index 27fd46d4bd..f41db93820 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
@@ -30,8 +30,6 @@ import java.util.Collection;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
@@ -59,6 +57,8 @@ import org.junit.rules.TestRule;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -69,7 +69,7 @@ import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
public class TestDataBlockEncoders {
- private static final Log LOG = LogFactory.getLog(TestDataBlockEncoders.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDataBlockEncoders.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java
index dab867327f..380e6fbbd6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java
@@ -22,11 +22,11 @@ import static org.junit.Assert.*;
import java.io.IOException;
import java.util.Map;
import java.util.NavigableSet;
+import java.util.Objects;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.databind.JsonMappingException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@@ -38,10 +38,12 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({IOTests.class, SmallTests.class})
public class TestBlockCacheReporting {
- private static final Log LOG = LogFactory.getLog(TestBlockCacheReporting.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestBlockCacheReporting.class);
private Configuration conf;
@Before
@@ -86,9 +88,9 @@ public class TestBlockCacheReporting {
final int count = 3;
addDataAndHits(cc.getBlockCache(), count);
// The below has no asserts. It is just exercising toString and toJSON code.
- LOG.info(cc.getBlockCache().getStats());
+ LOG.info(Objects.toString(cc.getBlockCache().getStats()));
BlockCacheUtil.CachedBlocksByFile cbsbf = logPerBlock(cc.getBlockCache());
- LOG.info(cbsbf);
+ LOG.info(Objects.toString(cbsbf));
logPerFile(cbsbf);
bucketCacheReport(cc.getBlockCache());
LOG.info(BlockCacheUtil.toJSON(cbsbf));
@@ -106,9 +108,9 @@ public class TestBlockCacheReporting {
BlockCache bc = cc.getBlockCache();
LOG.info("count=" + bc.getBlockCount() + ", currentSize=" + bc.getCurrentSize() +
", freeSize=" + bc.getFreeSize() );
- LOG.info(cc.getBlockCache().getStats());
+ LOG.info(Objects.toString(cc.getBlockCache().getStats()));
BlockCacheUtil.CachedBlocksByFile cbsbf = logPerBlock(cc.getBlockCache());
- LOG.info(cbsbf);
+ LOG.info(Objects.toString(cbsbf));
logPerFile(cbsbf);
bucketCacheReport(cc.getBlockCache());
LOG.info(BlockCacheUtil.toJSON(cbsbf));
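
Aside on the Objects.toString(...) wrapping above: commons-logging's Log.info(Object) accepted any object, while the slf4j Logger API takes String messages (plus parameterized variants), so non-String arguments have to be converted. A minimal illustrative sketch of the two equivalent styles (the class and method names here are made up for the example):

    import java.util.Objects;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Slf4jMessageSketch {
      private static final Logger LOG = LoggerFactory.getLogger(Slf4jMessageSketch.class);

      void logStats(Object stats) {
        // Explicit conversion, matching the style used in these tests.
        LOG.info(Objects.toString(stats));
        // Parameterized form; stats.toString() is only evaluated if INFO is enabled.
        LOG.info("stats={}", stats);
      }
    }
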
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
index e1ae654b5b..e07d2aa531 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
@@ -28,8 +28,6 @@ import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
import java.nio.ByteBuffer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -49,6 +47,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests that {@link CacheConfig} does as expected.
@@ -59,7 +59,7 @@ import org.junit.experimental.categories.Category;
// tests clash on the global variable if this test is run as small sized test.
@Category({IOTests.class, LargeTests.class})
public class TestCacheConfig {
- private static final Log LOG = LogFactory.getLog(TestCacheConfig.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCacheConfig.class);
private Configuration conf;
static class Deserializer implements CacheableDeserializer {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index 9535a461c4..74a310d980 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -34,8 +34,6 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -71,7 +69,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -82,7 +81,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category({IOTests.class, MediumTests.class})
public class TestCacheOnWrite {
- private static final Log LOG = LogFactory.getLog(TestCacheOnWrite.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCacheOnWrite.class);
private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
private Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
index 5111e365ea..d48c5f3b0d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
@@ -34,8 +34,6 @@ import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -52,10 +50,12 @@ import org.apache.hadoop.hbase.util.ChecksumType;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({IOTests.class, SmallTests.class})
public class TestChecksum {
- private static final Log LOG = LogFactory.getLog(TestHFileBlock.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHFileBlock.class);
static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = {
NONE, GZ };
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
index fcc09d550a..8b2a3af405 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
@@ -31,8 +31,6 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -47,12 +45,14 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@RunWith(Parameterized.class)
@Category({IOTests.class, SmallTests.class})
public class TestFixedFileTrailer {
- private static final Log LOG = LogFactory.getLog(TestFixedFileTrailer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFixedFileTrailer.class);
private static final int MAX_COMPARATOR_NAME_LENGTH = 128;
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
index cc4a71683e..30501e2e08 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
@@ -29,10 +29,9 @@ import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
+import java.util.Objects;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -65,6 +64,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* test hfile features.
@@ -74,7 +75,7 @@ public class TestHFile {
@Rule public TestName testName = new TestName();
- private static final Log LOG = LogFactory.getLog(TestHFile.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHFile.class);
private static final int NUM_VALID_KEY_TYPES = KeyValue.Type.values().length - 2;
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static String ROOT_DIR =
@@ -322,7 +323,7 @@ public class TestHFile {
.withFileContext(meta)
.withComparator(CellComparatorImpl.COMPARATOR)
.create();
- LOG.info(writer);
+ LOG.info(Objects.toString(writer));
writeRecords(writer, useTags);
fout.close();
FSDataInputStream fin = fs.open(ncHFile);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
index 8a2d721b5a..9c36788940 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
@@ -40,8 +40,6 @@ import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -75,6 +73,8 @@ import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({IOTests.class, MediumTests.class})
@RunWith(Parameterized.class)
@@ -83,7 +83,7 @@ public class TestHFileBlock {
private static final boolean detailedLogging = false;
private static final boolean[] BOOLEAN_VALUES = new boolean[] { false, true };
- private static final Log LOG = LogFactory.getLog(TestHFileBlock.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHFileBlock.class);
static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = { NONE, GZ };
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
index c5bc9d7476..ad42a66ed7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
@@ -35,8 +35,6 @@ import java.util.List;
import java.util.Random;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -68,6 +66,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@RunWith(Parameterized.class)
@Category({IOTests.class, MediumTests.class})
@@ -82,7 +82,7 @@ public class TestHFileBlockIndex {
this.compr = compr;
}
- private static final Log LOG = LogFactory.getLog(TestHFileBlockIndex.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHFileBlockIndex.class);
private static final int NUM_DATA_BLOCKS = 1000;
private static final HBaseTestingUtility TEST_UTIL =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
index 2dd00731a4..bec774ee0a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
@@ -30,8 +30,6 @@ import java.security.SecureRandom;
import java.util.List;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -54,10 +52,12 @@ import org.apache.hadoop.hbase.util.RedundantKVGenerator;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({IOTests.class, SmallTests.class})
public class TestHFileEncryption {
- private static final Log LOG = LogFactory.getLog(TestHFileEncryption.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHFileEncryption.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final SecureRandom RNG = new SecureRandom();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
index e4a3908e48..710fe43fe8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
@@ -32,8 +32,6 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -50,6 +48,8 @@ import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.io.BytesWritable;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* test the performance for seek.
@@ -73,7 +73,7 @@ public class TestHFileSeek extends TestCase {
private RandomDistribution.DiscreteRNG keyLenGen;
private KVGenerator kvGen;
- private static final Log LOG = LogFactory.getLog(TestHFileSeek.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHFileSeek.class);
@Override
public void setUp() throws IOException {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
index 5f320c80db..5dade74a7b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
@@ -31,8 +31,6 @@ import java.util.Collection;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -62,6 +60,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Testing writing a version 3 {@link HFile}.
@@ -70,7 +70,7 @@ import org.junit.runners.Parameterized.Parameters;
@Category({IOTests.class, SmallTests.class})
public class TestHFileWriterV3 {
- private static final Log LOG = LogFactory.getLog(TestHFileWriterV3.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHFileWriterV3.class);
private static final HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
index d92453a03a..42cc6e53b7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
@@ -17,9 +17,17 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -38,15 +46,10 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-import static org.junit.Assert.*;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
/**
* A kind of integration test at the intersection of {@link HFileBlock}, {@link CacheConfig},
@@ -55,7 +58,7 @@ import static org.junit.Assert.*;
@Category({IOTests.class, SmallTests.class})
@RunWith(Parameterized.class)
public class TestLazyDataBlockDecompression {
- private static final Log LOG = LogFactory.getLog(TestLazyDataBlockDecompression.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestLazyDataBlockDecompression.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private FileSystem fs;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
index 7e2a0a52b8..106a879544 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
@@ -26,8 +26,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
@@ -56,10 +54,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ RegionServerTests.class, MediumTests.class })
public class TestScannerFromBucketCache {
- private static final Log LOG = LogFactory.getLog(TestScannerFromBucketCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestScannerFromBucketCache.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
index 459deeb5e4..5e88b14f03 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
@@ -24,8 +24,6 @@ import java.util.Collection;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -48,6 +46,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the optimization that does not scan files where all timestamps are
@@ -57,8 +57,8 @@ import org.junit.runners.Parameterized.Parameters;
@Category({IOTests.class, MediumTests.class})
public class TestScannerSelectionUsingTTL {
- private static final Log LOG =
- LogFactory.getLog(TestScannerSelectionUsingTTL.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestScannerSelectionUsingTTL.class);
private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
private static TableName TABLE = TableName.valueOf("myTable");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java
index f4309eaf9a..651cf02037 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -44,11 +42,13 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({IOTests.class, MediumTests.class})
public class TestSeekBeforeWithInlineBlocks {
- private static final Log LOG = LogFactory.getLog(TestSeekBeforeWithInlineBlocks.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSeekBeforeWithInlineBlocks.class);
private static final HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java
index 90fb2f3151..6cb63a6c38 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java
@@ -36,8 +36,6 @@ import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
@@ -61,7 +59,8 @@ import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.StringUtils;
import org.junit.BeforeClass;
import org.junit.Test;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -70,7 +69,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
public abstract class AbstractTestIPC {
- private static final Log LOG = LogFactory.getLog(AbstractTestIPC.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractTestIPC.class);
private static final byte[] CELL_BYTES = Bytes.toBytes("xyz");
private static final KeyValue CELL = new KeyValue(CELL_BYTES, CELL_BYTES, CELL_BYTES, CELL_BYTES);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestFifoRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestFifoRpcScheduler.java
index 8611e48caf..119a28c9c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestFifoRpcScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestFifoRpcScheduler.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.ipc;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -32,6 +30,8 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.reflect.Field;
@@ -52,7 +52,7 @@ public class TestFifoRpcScheduler {
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestFifoRpcScheduler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFifoRpcScheduler.class);
private AtomicInteger callExecutionCount;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
index 20687ab424..06154cd5d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
@@ -27,8 +27,6 @@ import java.net.Socket;
import java.net.SocketAddress;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -49,6 +47,8 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(MediumTests.class)
public class TestRpcClientLeaks {
@@ -99,7 +99,7 @@ public class TestRpcClientLeaks {
UTIL.shutdownMiniCluster();
}
- public static final Log LOG = LogFactory.getLog(TestRpcClientLeaks.class);
+ public static final Logger LOG = LoggerFactory.getLogger(TestRpcClientLeaks.class);
@Test(expected=RetriesExhaustedException.class)
public void testSocketClosed() throws IOException, InterruptedException {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
index 8521e653b2..9e59bb9c8e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
@@ -46,8 +46,6 @@ import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -71,6 +69,8 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RPCTests.class, SmallTests.class})
public class TestSimpleRpcScheduler {
@@ -79,7 +79,7 @@ public class TestSimpleRpcScheduler {
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestSimpleRpcScheduler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSimpleRpcScheduler.class);
private final RpcScheduler.Context CONTEXT = new RpcScheduler.Context() {
@Override
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java
index f8ef1e1f84..f747fd6e79 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java
@@ -43,8 +43,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.LongAdder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -93,14 +91,15 @@ import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
/**
* Base class for testing distributed log splitting.
*/
public abstract class AbstractTestDLS {
- private static final Log LOG = LogFactory.getLog(TestSplitLogManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSplitLogManager.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java
index 9b6cd16565..d813dfb511 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.concurrent.Semaphore;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.ChoreService;
@@ -51,13 +49,15 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the {@link ActiveMasterManager}.
*/
@Category({MasterTests.class, MediumTests.class})
public class TestActiveMasterManager {
- private final static Log LOG = LogFactory.getLog(TestActiveMasterManager.class);
+ private final static Logger LOG = LoggerFactory.getLogger(TestActiveMasterManager.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@BeforeClass
@@ -226,7 +226,7 @@ public class TestActiveMasterManager {
}
public static class NodeDeletionListener extends ZKListener {
- private static final Log LOG = LogFactory.getLog(NodeDeletionListener.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NodeDeletionListener.class);
private Semaphore lock;
private String node;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
index 7f8e9c9ee1..34a1ccf6aa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
@@ -26,8 +26,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -58,10 +56,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestAssignmentListener {
- private static final Log LOG = LogFactory.getLog(TestAssignmentListener.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestAssignmentListener.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java
index fbbebcc4b0..717933aa76 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompatibilityFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -40,13 +38,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.fail;
@Category(MediumTests.class)
public class TestAssignmentManagerMetrics {
- private static final Log LOG = LogFactory.getLog(TestAssignmentManagerMetrics.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestAssignmentManagerMetrics.class);
private static final MetricsAssertHelper metricsHelper = CompatibilityFactory
.getInstance(MetricsAssertHelper.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
index fcdf4d6ba5..da501006d1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
@@ -28,13 +28,12 @@ import static org.mockito.Mockito.spy;
import java.io.IOException;
import java.util.Map;
import java.util.NavigableMap;
+import java.util.Objects;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentSkipListMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -62,7 +61,6 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
-import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.util.Triple;
import org.junit.After;
import org.junit.Before;
@@ -72,10 +70,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, SmallTests.class})
public class TestCatalogJanitor {
- private static final Log LOG = LogFactory.getLog(TestCatalogJanitor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCatalogJanitor.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
@Rule public final TestName name = new TestName();
@@ -478,7 +478,7 @@ public class TestCatalogJanitor {
private void logFiles(String description, FileStatus[] storeFiles) {
LOG.debug("Current " + description + ": ");
for (FileStatus file : storeFiles) {
- LOG.debug(file.getPath());
+ LOG.debug(Objects.toString(file.getPath()));
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
index 28ed6a8c2b..c42d042a43 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
@@ -26,8 +26,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -56,10 +54,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestCatalogJanitorInMemoryStates {
- private static final Log LOG = LogFactory.getLog(TestCatalogJanitorInMemoryStates.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCatalogJanitorInMemoryStates.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
@Rule public final TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java
index bd7c5073f9..77f038af06 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java
@@ -24,8 +24,6 @@ import static org.mockito.Mockito.when;
import java.net.InetAddress;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClockOutOfSyncException;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -36,11 +34,13 @@ import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, SmallTests.class})
public class TestClockSkewDetection {
- private static final Log LOG =
- LogFactory.getLog(TestClockSkewDetection.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestClockSkewDetection.class);
@Test
public void testClockSkewDetection() throws Exception {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
index 4b2c91150b..fe7bc10f08 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -45,13 +43,15 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
@Category({ MasterTests.class, MediumTests.class })
public class TestHMasterRPCException {
- private static final Log LOG = LogFactory.getLog(TestHMasterRPCException.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHMasterRPCException.class);
private final HBaseTestingUtility testUtil = HBaseTestingUtility.createLocalHTU();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java
index eed793f099..07a21beb40 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java
@@ -26,8 +26,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -55,13 +53,14 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
@Category({MasterTests.class, MediumTests.class})
public class TestMaster {
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestMaster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMaster.class);
private static final TableName TABLENAME =
TableName.valueOf("TestMaster");
private static final byte[] FAMILYNAME = Bytes.toBytes("fam");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
index 18378ace52..ff813eba77 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -42,10 +40,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({FlakeyTests.class, LargeTests.class})
public class TestMasterFailover {
- private static final Log LOG = LogFactory.getLog(TestMasterFailover.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterFailover.class);
@Rule public TestName name = new TestName();
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java
index bf13e7fc5d..4d2a885fa2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java
@@ -21,8 +21,6 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -34,13 +32,15 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the master filesystem in a local cluster
*/
@Category({MasterTests.class, MediumTests.class})
public class TestMasterFileSystem {
- private static final Log LOG = LogFactory.getLog(TestMasterFileSystem.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterFileSystem.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java
index b300818047..9b84823514 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompatibilityFactory;
import org.apache.hadoop.hbase.CoordinatedStateManager;
@@ -38,11 +36,13 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestMasterMetrics {
- private static final Log LOG = LogFactory.getLog(TestMasterMetrics.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterMetrics.class);
private static final MetricsAssertHelper metricsHelper = CompatibilityFactory
.getInstance(MetricsAssertHelper.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java
index 9acd2f7500..1b5175a0ce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java
@@ -21,8 +21,6 @@ import static org.junit.Assert.*;
import java.util.AbstractMap.SimpleImmutableEntry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;
import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus;
@@ -34,10 +32,12 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestMasterMetricsWrapper {
- private static final Log LOG = LogFactory.getLog(TestMasterMetricsWrapper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterMetricsWrapper.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final int NUM_RS = 4;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
index 20c9fe1f85..7a2817e088 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
@@ -27,8 +27,6 @@ import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -66,6 +64,8 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Standup the master and fake it to test various aspects of master function.
@@ -77,7 +77,7 @@ import org.mockito.Mockito;
*/
@Category({MasterTests.class, MediumTests.class})
public class TestMasterNoCluster {
- private static final Log LOG = LogFactory.getLog(TestMasterNoCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterNoCluster.class);
private static final HBaseTestingUtility TESTUTIL = new HBaseTestingUtility();
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index dd0fada0f1..43c258ed8e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -32,8 +32,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus.Option;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -66,10 +64,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestMasterOperationsForRegionReplicas {
- private static final Log LOG = LogFactory.getLog(TestRegionPlacement.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionPlacement.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Connection CONNECTION = null;
private static Admin ADMIN;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
index fc49c4441f..2af6255c0e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.NavigableSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -43,11 +41,14 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, LargeTests.class})
public class TestMasterRestartAfterDisablingTable {
- private static final Log LOG = LogFactory.getLog(TestMasterRestartAfterDisablingTable.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestMasterRestartAfterDisablingTable.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
index ebf16b7007..0eff8df289 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -41,10 +39,12 @@ import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, LargeTests.class})
public class TestMasterShutdown {
- private static final Log LOG = LogFactory.getLog(TestMasterShutdown.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterShutdown.class);
/**
* Simple test of shutdown.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
index 042a462129..65351bdc7c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -44,6 +42,8 @@ import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test transitions of state across the master. Sets up the cluster once and
@@ -51,7 +51,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({MasterTests.class, LargeTests.class})
public class TestMasterTransitions {
- private static final Log LOG = LogFactory.getLog(TestMasterTransitions.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterTransitions.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final TableName TABLENAME = TableName.valueOf("master_transitions");
private static final byte [][] FAMILIES = new byte [][] {Bytes.toBytes("a"),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
index 0c9e33ebc4..e99d533e18 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
@@ -34,8 +34,6 @@ import java.util.Map;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -70,10 +68,12 @@ import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestRegionPlacement {
- private static final Log LOG = LogFactory.getLog(TestRegionPlacement.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionPlacement.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static int SLAVES = 10;
private static Connection CONNECTION;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java
index 87f5ba3ab5..835e274e8d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java
@@ -25,8 +25,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -47,10 +45,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestRegionPlacement2 {
- private static final Log LOG = LogFactory.getLog(TestRegionPlacement2.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionPlacement2.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static int SLAVES = 7;
private final static int PRIMARY = Position.PRIMARY.ordinal();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
index 9809090003..b871bb12ab 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -44,10 +42,12 @@ import org.apache.hadoop.hbase.util.Threads;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, LargeTests.class})
public class TestRestartCluster {
- private static final Log LOG = LogFactory.getLog(TestRestartCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRestartCluster.class);
private HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final TableName[] TABLES = {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
index 89feadf8ce..8953147b76 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
@@ -26,8 +26,6 @@ import java.util.NavigableSet;
import java.util.Set;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -46,7 +44,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
/**
@@ -54,7 +53,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
*/
@Category({MasterTests.class, LargeTests.class})
public class TestRollingRestart {
- private static final Log LOG = LogFactory.getLog(TestRollingRestart.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRollingRestart.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
index d74b7320da..cd5239edbe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
@@ -40,8 +40,6 @@ import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.LongAdder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -62,8 +60,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs.Ids;
@@ -73,17 +69,15 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestSplitLogManager {
- private static final Log LOG = LogFactory.getLog(TestSplitLogManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSplitLogManager.class);
private final ServerManager sm = Mockito.mock(ServerManager.class);
- static {
- Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.DEBUG);
- }
-
private ZKWatcher zkw;
private DummyMasterServices master;
private SplitLogManager slm;
@@ -540,7 +534,7 @@ public class TestSplitLogManager {
try {
ZKUtil.setData(zkw, entry.getKey(), slt.toByteArray());
} catch (KeeperException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
encounteredZKException = true;
}
if (!encounteredZKException) {
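The TestSplitLogManager hunk above touches call sites as well as imports: commons-logging accepted a bare Throwable in LOG.warn(e), while the SLF4J Logger API takes a String message first, so the call is rewritten as LOG.warn(e.toString(), e) to keep both the message and the stack trace. The deleted static block that called org.apache.log4j.Logger.setLevel goes away for the same reason the log4j imports do; level tuning is left to the logging configuration on the test classpath. A minimal, self-contained sketch of the SLF4J idiom (the class name below is hypothetical, not part of the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class WarnWithThrowableExample {
  // Same shape as the migrated test classes: one static SLF4J logger per class.
  private static final Logger LOG = LoggerFactory.getLogger(WarnWithThrowableExample.class);

  public static void main(String[] args) {
    try {
      throw new IllegalStateException("simulated ZK write failure");
    } catch (IllegalStateException e) {
      // commons-logging allowed LOG.warn(e); SLF4J needs (message, throwable)
      // so the exception text and the full stack trace are logged together.
      LOG.warn(e.toString(), e);
    }
  }
}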
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
index a3de52d500..5cb82a27e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -46,6 +44,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Run tests that use the HBase clients; {@link org.apache.hadoop.hbase.client.HTable}.
@@ -55,7 +55,7 @@ import org.junit.experimental.categories.Category;
@Category({MasterTests.class, LargeTests.class})
@SuppressWarnings ("deprecation")
public class TestWarmupRegion {
- private static final Log LOG = LogFactory.getLog(TestWarmupRegion.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWarmupRegion.class);
protected TableName TABLENAME = TableName.valueOf("testPurgeFutureDeletes");
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte [] ROW = Bytes.toBytes("testRow");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/AssignmentTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/AssignmentTestingUtil.java
index 459e4ee5fa..3799d73918 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/AssignmentTestingUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/AssignmentTestingUtil.java
@@ -21,8 +21,6 @@ import static org.junit.Assert.assertEquals;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
@@ -31,11 +29,13 @@ import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public abstract class AssignmentTestingUtil {
- private static final Log LOG = LogFactory.getLog(AssignmentTestingUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AssignmentTestingUtil.class);
private AssignmentTestingUtil() {}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java
index f4365eac87..3c453bcf4c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java
@@ -37,8 +37,6 @@ import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -54,7 +52,6 @@ import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.RegionState.State;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
-import org.apache.hadoop.hbase.master.procedure.MasterProcedureScheduler;
import org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait;
import org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher;
import org.apache.hadoop.hbase.procedure2.Procedure;
@@ -69,8 +66,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.ipc.RemoteException;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
@@ -80,7 +75,8 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
@@ -96,10 +92,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
@Category({MasterTests.class, MediumTests.class})
public class TestAssignmentManager {
- private static final Log LOG = LogFactory.getLog(TestAssignmentManager.class);
- static {
- Logger.getLogger(MasterProcedureScheduler.class).setLevel(Level.TRACE);
- }
+ private static final Logger LOG = LoggerFactory.getLogger(TestAssignmentManager.class);
+
@Rule public TestName name = new TestName();
@Rule public final TestRule timeout =
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
@@ -534,6 +528,7 @@ public class TestAssignmentManager {
}
private class NoopRsExecutor implements MockRSExecutor {
+ @Override
public ExecuteProceduresResponse sendRequest(ServerName server,
ExecuteProceduresRequest request) throws IOException {
ExecuteProceduresResponse.Builder builder = ExecuteProceduresResponse.newBuilder();
@@ -602,6 +597,7 @@ public class TestAssignmentManager {
}
private static class ServerNotYetRunningRsExecutor implements MockRSExecutor {
+ @Override
public ExecuteProceduresResponse sendRequest(ServerName server, ExecuteProceduresRequest req)
throws IOException {
throw new ServerNotRunningYetException("wait on server startup");
@@ -615,6 +611,7 @@ public class TestAssignmentManager {
this.exception = exception;
}
+ @Override
public ExecuteProceduresResponse sendRequest(ServerName server, ExecuteProceduresRequest req)
throws IOException {
throw exception;
@@ -634,6 +631,7 @@ public class TestAssignmentManager {
this.maxSocketTimeoutRetries = maxSocketTimeoutRetries;
}
+ @Override
public ExecuteProceduresResponse sendRequest(ServerName server, ExecuteProceduresRequest req)
throws IOException {
// SocketTimeoutException should be a temporary problem
@@ -746,6 +744,7 @@ public class TestAssignmentManager {
private class RandRsExecutor extends NoopRsExecutor {
private final Random rand = new Random();
+ @Override
public ExecuteProceduresResponse sendRequest(ServerName server, ExecuteProceduresRequest req)
throws IOException {
switch (rand.nextInt(5)) {
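In TestAssignmentManager the migration also drops the static block that forced MasterProcedureScheduler logging to TRACE through org.apache.log4j.Logger, since that tied the test to one concrete backend; under SLF4J the level comes from whatever binding and configuration the test classpath provides. The added @Override annotations on the MockRSExecutor implementations are incidental cleanup. For code that still wants very verbose output without a compile-time log4j dependency, the usual SLF4J pattern is parameterized logging plus an isTraceEnabled() guard around anything expensive to build. A small illustrative sketch (class and method names are hypothetical):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TraceLoggingExample {
  private static final Logger LOG = LoggerFactory.getLogger(TraceLoggingExample.class);

  void reportAssignment(String region, String server) {
    // {} placeholders are only rendered when TRACE is actually enabled,
    // so no formatting cost is paid at higher levels.
    LOG.trace("Assigned {} to {}", region, server);

    if (LOG.isTraceEnabled()) {
      // Guard genuinely expensive diagnostics explicitly.
      LOG.trace("Cluster dump: {}", buildExpensiveClusterDump());
    }
  }

  private String buildExpensiveClusterDump() {
    return "...";
  }
}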
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java
index 72fb7ad12e..d33eab7cab 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.ServerName;
@@ -43,10 +41,12 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, LargeTests.class})
public class TestAssignmentOnRSCrash {
- private static final Log LOG = LogFactory.getLog(TestAssignmentOnRSCrash.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestAssignmentOnRSCrash.class);
private static final TableName TEST_TABLE = TableName.valueOf("testb");
private static final String FAMILY_STR = "f";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestMergeTableRegionsProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestMergeTableRegionsProcedure.java
index beb53eccc9..57882dfb4f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestMergeTableRegionsProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestMergeTableRegionsProcedure.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -54,10 +52,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestMergeTableRegionsProcedure {
- private static final Log LOG = LogFactory.getLog(TestMergeTableRegionsProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMergeTableRegionsProcedure.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
@Rule public final TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRegionStates.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRegionStates.java
index bd131300ae..587ebc0786 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRegionStates.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRegionStates.java
@@ -28,8 +28,6 @@ import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -45,10 +43,12 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestRegionStates {
- private static final Log LOG = LogFactory.getLog(TestRegionStates.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionStates.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java
index 49eb5735f6..056e66d3e6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.master.assignment;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -51,6 +49,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
@@ -64,7 +64,7 @@ import static org.junit.Assert.assertNotNull;
*/
@Category({MasterTests.class, MediumTests.class})
public class TestRogueRSAssignment {
- private static final Log LOG = LogFactory.getLog(TestRogueRSAssignment.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRogueRSAssignment.class);
@Rule
public final TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
index 1cdd9c59fc..37d982082e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
@@ -63,10 +61,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestSplitTableRegionProcedure {
- private static final Log LOG = LogFactory.getLog(TestSplitTableRegionProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSplitTableRegionProcedure.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
index ee2e433fc0..adf56b8fe8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
@@ -35,8 +35,6 @@ import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ServerName;
@@ -50,6 +48,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.net.DNSToSwitchMapping;
import org.junit.Assert;
import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Class used to be the base of unit tests on load balancers. It gives helper
@@ -58,7 +58,7 @@ import org.junit.BeforeClass;
*
*/
public class BalancerTestBase {
- private static final Log LOG = LogFactory.getLog(BalancerTestBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(BalancerTestBase.class);
protected static Random rand = new Random();
static int regionId = 0;
protected static Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
index 2b40ea7362..2aaa3af20c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
@@ -28,8 +28,6 @@ import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.master.LoadBalancer;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch;
@@ -55,8 +54,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch;
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class LoadBalancerPerformanceEvaluation extends AbstractHBaseTool {
- private static final Log LOG =
- LogFactory.getLog(LoadBalancerPerformanceEvaluation.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(LoadBalancerPerformanceEvaluation.class.getName());
protected static final HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();
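LoadBalancerPerformanceEvaluation keeps the getName() call, so its logger is still looked up by the fully qualified class name as a String. LoggerFactory.getLogger(Class) resolves to the same logger name, so the two forms are interchangeable; the Class overload is simply the shape used elsewhere in this patch. A hypothetical example showing the equivalence:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerLookupExample {
  // Both lookups yield a logger named after the fully qualified class name.
  private static final Logger BY_NAME = LoggerFactory.getLogger(LoggerLookupExample.class.getName());
  private static final Logger BY_CLASS = LoggerFactory.getLogger(LoggerLookupExample.class);

  public static void main(String[] args) {
    // Prints the same logger name twice.
    System.out.println(BY_NAME.getName());
    System.out.println(BY_CLASS.getName());
  }
}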
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
index c33cd56e4a..b7fe71f5d3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
@@ -35,8 +35,6 @@ import java.util.TreeSet;
import java.util.stream.Collectors;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseIOException;
@@ -61,14 +59,15 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category({MasterTests.class, MediumTests.class})
public class TestBaseLoadBalancer extends BalancerTestBase {
private static LoadBalancer loadBalancer;
- private static final Log LOG = LogFactory.getLog(TestBaseLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestBaseLoadBalancer.class);
private static final ServerName master = ServerName.valueOf("fake-master", 0, 1L);
private static RackManager rackManager;
private static final int NUM_SERVERS = 15;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java
index 4d09bf8147..4b500c9619 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java
@@ -25,8 +25,6 @@ import java.util.List;
import java.util.Map;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ServerName;
@@ -43,13 +41,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the load balancer that is created by default.
*/
@Category({MasterTests.class, MediumTests.class})
public class TestDefaultLoadBalancer extends BalancerTestBase {
- private static final Log LOG = LogFactory.getLog(TestDefaultLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDefaultLoadBalancer.class);
private static LoadBalancer loadBalancer;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java
index 3f39f98bb3..cec2968e57 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.assertEquals;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -45,6 +43,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/*
* This case tests a scenario when a cluster with tables is moved from Stochastic Load Balancer
@@ -53,7 +53,7 @@ import org.junit.experimental.categories.Category;
@Category(MediumTests.class)
public class TestFavoredNodeTableImport {
- private static final Log LOG = LogFactory.getLog(TestFavoredNodeTableImport.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFavoredNodeTableImport.class);
private static final int SLAVES = 3;
private static final int REGION_NUM = 20;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java
index 5a0951997d..fb0dea7827 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java
@@ -28,8 +28,6 @@ import java.util.EnumSet;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.ClusterStatus.Option;
@@ -59,6 +57,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -71,7 +71,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
@Category(LargeTests.class)
public class TestFavoredStochasticBalancerPickers extends BalancerTestBase {
- private static final Log LOG = LogFactory.getLog(TestFavoredStochasticBalancerPickers.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestFavoredStochasticBalancerPickers.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final int SLAVES = 6;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java
index d6f559f17d..6541f15c59 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java
@@ -31,8 +31,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus.Option;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -54,7 +52,6 @@ import org.apache.hadoop.hbase.master.ServerManager;
import org.apache.hadoop.hbase.master.assignment.RegionStates;
import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
@@ -64,7 +61,8 @@ import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@@ -72,7 +70,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
@Category(MediumTests.class)
public class TestFavoredStochasticLoadBalancer extends BalancerTestBase {
- private static final Log LOG = LogFactory.getLog(TestFavoredStochasticLoadBalancer.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestFavoredStochasticLoadBalancer.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final int SLAVES = 8;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionsOnMasterOptions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionsOnMasterOptions.java
index 58c33331db..a8e78d7ae9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionsOnMasterOptions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionsOnMasterOptions.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.master.balancer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Table;
@@ -36,6 +34,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.util.List;
@@ -49,7 +49,7 @@ import static org.junit.Assert.assertTrue;
*/
@Category({MediumTests.class})
public class TestRegionsOnMasterOptions {
- private static final Log LOG = LogFactory.getLog(TestRegionsOnMasterOptions.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionsOnMasterOptions.class);
@Rule public TestName name = new TestName();
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java
index 68d009dfa0..1e6f1dd403 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java
@@ -34,8 +34,6 @@ import java.util.Map.Entry;
import java.util.Queue;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -55,11 +53,13 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({FlakeyTests.class, MediumTests.class})
public class TestStochasticLoadBalancer extends BalancerTestBase {
public static final String REGION_KEY = "testRegion";
- private static final Log LOG = LogFactory.getLog(TestStochasticLoadBalancer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestStochasticLoadBalancer.class);
// Mapping of locality test -> expected locality
private float[] expectedLocalities = {1.0f, 0.0f, 0.50f, 0.25f, 1.0f};
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer2.java
index 0b69a4a052..637c2f6dac 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer2.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.master.balancer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.testclassification.FlakeyTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -28,10 +26,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({FlakeyTests.class, LargeTests.class})
public class TestStochasticLoadBalancer2 extends BalancerTestBase {
- private static final Log LOG = LogFactory.getLog(TestStochasticLoadBalancer2.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestStochasticLoadBalancer2.class);
@Rule
public final TestRule timeout = CategoryBasedTimeout.builder()
.withTimeout(this.getClass())
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
index 39bdbc7e6a..a9a856b905 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -43,11 +41,13 @@ import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, SmallTests.class})
public class TestCleanerChore {
- private static final Log LOG = LogFactory.getLog(TestCleanerChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCleanerChore.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
@After
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
index a08b0c7ee3..4520499321 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
@@ -25,8 +25,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -51,10 +49,12 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestHFileCleaner {
- private static final Log LOG = LogFactory.getLog(TestHFileCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHFileCleaner.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
index 34e81db428..43fc6a4f33 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
@@ -31,8 +31,6 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration;
@@ -68,11 +66,13 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestLogsCleaner {
- private static final Log LOG = LogFactory.getLog(TestLogsCleaner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestLogsCleaner.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@BeforeClass
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
index 37c9221fd4..2f233016a7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
@@ -25,8 +25,6 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -66,10 +64,12 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ MasterTests.class, SmallTests.class })
public class TestReplicationHFileCleaner {
- private static final Log LOG = LogFactory.getLog(ReplicationQueuesZKImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationHFileCleaner.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Server server;
private static ReplicationQueues rq;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
index 756152ed41..9e76876fc2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
@@ -26,8 +26,6 @@ import java.util.Collection;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -67,7 +65,8 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
@@ -79,7 +78,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category({MasterTests.class, MediumTests.class})
public class TestSnapshotFromMaster {
- private static final Log LOG = LogFactory.getLog(TestSnapshotFromMaster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotFromMaster.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final int NUM_RS = 2;
private static Path rootDir;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
index e2e97dc033..80a9b7b248 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -47,6 +45,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, SmallTests.class})
public class TestLockManager {
@@ -55,7 +55,7 @@ public class TestLockManager {
// crank this up if this test turns out to be flaky.
private static final int LOCAL_LOCKS_TIMEOUT = 1000;
- private static final Log LOG = LogFactory.getLog(TestLockProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestLockManager.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static MasterServices masterServices;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
index a817bd5b37..28c48d974b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
@@ -28,8 +28,6 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -60,7 +58,8 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;
@@ -81,7 +80,7 @@ public class TestLockProcedure {
private static final int HEARTBEAT_TIMEOUT = 2000;
private static final int LOCAL_LOCKS_TIMEOUT = 4000;
- private static final Log LOG = LogFactory.getLog(TestLockProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestLockProcedure.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static MasterRpcServices masterRpcService;
private static ProcedureExecutor procExec;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
index 0936c1643c..0a8d74909c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
@@ -29,8 +29,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.RegionLoad;
import org.apache.hadoop.hbase.ServerName;
@@ -50,6 +48,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -57,7 +57,7 @@ import org.mockito.Mockito;
*/
@Category({MasterTests.class, SmallTests.class})
public class TestSimpleRegionNormalizer {
- private static final Log LOG = LogFactory.getLog(TestSimpleRegionNormalizer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSimpleRegionNormalizer.class);
private static RegionNormalizer normalizer;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
index 8fe53af3d5..5236c0465b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
@@ -25,8 +25,6 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -55,13 +53,16 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Testing {@link SimpleRegionNormalizer} on minicluster.
*/
@Category({MasterTests.class, MediumTests.class})
public class TestSimpleRegionNormalizerOnCluster {
- private static final Log LOG = LogFactory.getLog(TestSimpleRegionNormalizerOnCluster.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestSimpleRegionNormalizerOnCluster.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final byte[] FAMILYNAME = Bytes.toBytes("fam");
private static Admin admin;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
index c68c01ffb4..243bb1487d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
@@ -28,8 +28,6 @@ import java.util.TreeSet;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -63,10 +61,12 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.MD5Hash;
import org.apache.hadoop.hbase.util.ModifyRegionUtils;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class MasterProcedureTestingUtility {
- private static final Log LOG = LogFactory.getLog(MasterProcedureTestingUtility.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MasterProcedureTestingUtility.class);
private MasterProcedureTestingUtility() { }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java
index b68a2f747a..490501a866 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertTrue;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableExistsException;
@@ -42,10 +40,12 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase {
- private static final Log LOG = LogFactory.getLog(TestCloneSnapshotProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCloneSnapshotProcedure.class);
protected final byte[] CF = Bytes.toBytes("cf1");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java
index 81942e1a9e..32a757e634 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -43,10 +41,12 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestCreateNamespaceProcedure {
- private static final Log LOG = LogFactory.getLog(TestCreateNamespaceProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCreateNamespaceProcedure.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
index c57f210be2..e54eb66162 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
@@ -41,10 +39,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestCreateTableProcedure extends TestTableDDLProcedureBase {
- private static final Log LOG = LogFactory.getLog(TestCreateTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCreateTableProcedure.class);
private static final String F1 = "f1";
private static final String F2 = "f2";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
index dcfed2991e..fa2507e926 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -46,10 +44,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestDeleteNamespaceProcedure {
- private static final Log LOG = LogFactory.getLog(TestDeleteNamespaceProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDeleteNamespaceProcedure.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
index 6f109e516a..9f4c1803f4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
@@ -38,10 +36,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestDeleteTableProcedure extends TestTableDDLProcedureBase {
- private static final Log LOG = LogFactory.getLog(TestDeleteTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDeleteTableProcedure.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
@Rule public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java
index 9c5970cbbc..e49bfcbbce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
@@ -37,10 +35,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestDisableTableProcedure extends TestTableDDLProcedureBase {
- private static final Log LOG = LogFactory.getLog(TestDisableTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDisableTableProcedure.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java
index 66071d3fd0..8364dfea5d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
@@ -37,10 +35,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestEnableTableProcedure extends TestTableDDLProcedureBase {
- private static final Log LOG = LogFactory.getLog(TestEnableTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestEnableTableProcedure.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
@Rule public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
index 3eeb382ad5..31faa083b9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertEquals;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -44,7 +42,8 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableState;
@@ -53,7 +52,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.T
@Category({MasterTests.class, LargeTests.class})
public class TestMasterFailoverWithProcedures {
- private static final Log LOG = LogFactory.getLog(TestMasterFailoverWithProcedures.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterFailoverWithProcedures.class);
@ClassRule
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java
index b0a598ef0d..3cb5e646a0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertEquals;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -45,10 +43,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestMasterProcedureEvents {
- private static final Log LOG = LogFactory.getLog(TestCreateTableProcedure.class);
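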
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterProcedureEvents.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
index d971b5fb26..0291165848 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
@@ -24,8 +24,7 @@ import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -47,10 +46,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, SmallTests.class})
public class TestMasterProcedureScheduler {
- private static final Log LOG = LogFactory.getLog(TestMasterProcedureScheduler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterProcedureScheduler.class);
private MasterProcedureScheduler queue;
private Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
index af48f641ea..2e8e52ae24 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
@@ -37,13 +35,16 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, MediumTests.class})
public class TestMasterProcedureSchedulerConcurrency {
- private static final Log LOG = LogFactory.getLog(TestMasterProcedureSchedulerConcurrency.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestMasterProcedureSchedulerConcurrency.class);
private MasterProcedureScheduler queue;
private Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java
index e0452c2f2a..ff2303acc3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -52,11 +50,13 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, LargeTests.class})
@Ignore
public class TestMasterProcedureWalLease {
- private static final Log LOG = LogFactory.getLog(TestMasterProcedureWalLease.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterProcedureWalLease.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java
index c1b28967b4..8dec59d0e8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -42,10 +40,12 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestModifyNamespaceProcedure {
- private static final Log LOG = LogFactory.getLog(TestModifyNamespaceProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestModifyNamespaceProcedure.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
index bb531ce0c2..f47654f7ea 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -46,10 +44,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestProcedureAdmin {
- private static final Log LOG = LogFactory.getLog(TestProcedureAdmin.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureAdmin.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
@Rule public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java
index 13146f72b2..3946ee9927 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java
@@ -26,8 +26,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -52,10 +50,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestRestoreSnapshotProcedure extends TestTableDDLProcedureBase {
- private static final Log LOG = LogFactory.getLog(TestRestoreSnapshotProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRestoreSnapshotProcedure.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java
index 08070adf9f..389ed62e6b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertTrue;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -44,10 +42,12 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(MediumTests.class)
public class TestSafemodeBringsDownMaster {
- private static final Log LOG = LogFactory.getLog(TestSafemodeBringsDownMaster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSafemodeBringsDownMaster.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java
index 627cbe8ffd..c1d1812c4c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -41,10 +39,12 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, LargeTests.class})
public class TestServerCrashProcedure {
- private static final Log LOG = LogFactory.getLog(TestServerCrashProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestServerCrashProcedure.class);
private HBaseTestingUtility util;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDDLProcedureBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDDLProcedureBase.java
index f7b4100b52..f7cf640379 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDDLProcedureBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDDLProcedureBase.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.master.procedure;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -30,10 +28,13 @@ import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import static org.junit.Assert.assertTrue;
public abstract class TestTableDDLProcedureBase {
- private static final Log LOG = LogFactory.getLog(TestTableDDLProcedureBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableDDLProcedureBase.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static void setupConf(Configuration conf) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java
index 2368af5561..b70bdd554a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
@@ -39,10 +37,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
public class TestTruncateTableProcedure extends TestTableDDLProcedureBase {
- private static final Log LOG = LogFactory.getLog(TestTruncateTableProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTruncateTableProcedure.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
index 157d08bd40..2834b8f404 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
@@ -20,11 +20,10 @@ package org.apache.hadoop.hbase.master.procedure;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
@@ -39,6 +38,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -46,7 +47,7 @@ import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, LargeTests.class})
public class TestWALProcedureStoreOnHDFS {
- private static final Log LOG = LogFactory.getLog(TestWALProcedureStoreOnHDFS.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedureStoreOnHDFS.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
@@ -58,7 +59,7 @@ public class TestWALProcedureStoreOnHDFS {
@Override
public void abortProcess() {
- LOG.fatal("Abort the Procedure Store");
+ LOG.error(HBaseMarkers.FATAL, "Abort the Procedure Store");
store.stop(true);
}
};
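SLF4J has no FATAL level, so the hunk above rewrites LOG.fatal(...) as LOG.error(...) tagged with an SLF4J Marker (HBaseMarkers.FATAL) that appenders can still match on. A minimal sketch of the same idea using only the SLF4J API, assuming an SLF4J binding on the classpath (the class name and marker label are illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class FatalMarkerSketch {
  // SLF4J drops commons-logging's FATAL level; a Marker carries the severity instead.
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");
  private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerSketch.class);

  public void abortProcess() {
    // Logged at ERROR, but the FATAL marker lets logging configuration filter or route it separately.
    LOG.error(FATAL, "Abort the Procedure Store");
  }
}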
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
index 15e3c9a61c..fdc5b44657 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
@@ -32,8 +32,6 @@ import java.util.List;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -50,7 +48,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
@@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
@Category({MasterTests.class, MediumTests.class})
public class TestSnapshotFileCache {
- private static final Log LOG = LogFactory.getLog(TestSnapshotFileCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotFileCache.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static long sequenceId = 0;
private static FileSystem fs;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
index fba250da9b..ad6c58e4eb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
@@ -23,8 +23,6 @@ import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -46,6 +44,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test that the snapshot hfile cleaner finds hfiles referenced in a snapshot
@@ -53,7 +53,7 @@ import org.junit.rules.TestName;
@Category({MasterTests.class, SmallTests.class})
public class TestSnapshotHFileCleaner {
- private static final Log LOG = LogFactory.getLog(TestSnapshotFileCache.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotHFileCleaner.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final String TABLE_NAME_STR = "testSnapshotManifest";
private static final String SNAPSHOT_NAME_STR = "testSnapshotManifest-snapshot";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
index 61f1cced62..32e65220d8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mob;
import junit.framework.TestCase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -38,10 +36,12 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(SmallTests.class)
public class TestCachedMobFile extends TestCase{
- static final Log LOG = LogFactory.getLog(TestCachedMobFile.class);
+ static final Logger LOG = LoggerFactory.getLogger(TestCachedMobFile.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private Configuration conf = TEST_UTIL.getConfiguration();
private CacheConfig cacheConf = new CacheConfig(conf);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java
index 9b69411a31..f894fb2511 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mob;
import junit.framework.TestCase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -40,10 +38,12 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(SmallTests.class)
public class TestMobFile extends TestCase {
- static final Log LOG = LogFactory.getLog(TestMobFile.class);
+ static final Logger LOG = LoggerFactory.getLogger(TestMobFile.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private Configuration conf = TEST_UTIL.getConfiguration();
private CacheConfig cacheConf = new CacheConfig(conf);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java
index d4289862cf..42e652867d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java
@@ -23,8 +23,6 @@ import java.util.Date;
import junit.framework.TestCase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -42,10 +40,12 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(SmallTests.class)
public class TestMobFileCache extends TestCase {
- static final Log LOG = LogFactory.getLog(TestMobFileCache.class);
+ static final Logger LOG = LoggerFactory.getLogger(TestMobFileCache.class);
private HBaseTestingUtility UTIL;
private HRegion region;
private Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
index 54071d081d..8c974e676a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
@@ -29,6 +29,7 @@ import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
+import java.util.Objects;
import java.util.Optional;
import java.util.Random;
import java.util.concurrent.ExecutorService;
@@ -40,8 +41,6 @@ import java.util.concurrent.TimeUnit;
import javax.crypto.spec.SecretKeySpec;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -103,10 +102,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(LargeTests.class)
public class TestMobCompactor {
- private static final Log LOG = LogFactory.getLog(TestMobCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMobCompactor.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Configuration conf = null;
private TableName tableName;
@@ -715,7 +716,7 @@ public class TestMobCompactor {
Thread.sleep(50);
fileList = fs.listStatus(path);
for (FileStatus fileStatus: fileList) {
- LOG.info(fileStatus);
+ LOG.info(Objects.toString(fileStatus));
}
}
}
@@ -1040,7 +1041,7 @@ public class TestMobCompactor {
}
}
});
- ((ThreadPoolExecutor) pool).allowCoreThreadTimeOut(true);
+ pool.allowCoreThreadTimeOut(true);
return pool;
}
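Commons-logging methods accept any Object as the message, while the SLF4J API takes a String, which is why the hunk above wraps the bare FileStatus in Objects.toString(...). A hedged sketch of that translation and of SLF4J's parameterized alternative, which defers the toString() call until the level is actually enabled (the fileStatus parameter is illustrative):

import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MessageFormattingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(MessageFormattingSketch.class);

  void logStatus(Object fileStatus) {
    // Direct translation used in the patch: build the String eagerly, null-safe.
    LOG.info(Objects.toString(fileStatus));
    // Parameterized form: fileStatus.toString() runs only if INFO is enabled.
    LOG.info("{}", fileStatus);
  }
}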
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
index bf899ea177..34acbe493e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
@@ -40,8 +40,6 @@ import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -86,10 +84,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(LargeTests.class)
public class TestPartitionedMobCompactor {
- private static final Log LOG = LogFactory.getLog(TestPartitionedMobCompactor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestPartitionedMobCompactor.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static String family = "family";
private final static String qf = "qf";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
index 2140cc1dc7..94680f2c80 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
@@ -28,8 +28,6 @@ import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -85,6 +83,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -97,7 +97,7 @@ import static org.junit.Assert.fail;
public class TestNamespaceAuditor {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestNamespaceAuditor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestNamespaceAuditor.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static Admin ADMIN;
private String prefix = "TestNamespaceAuditor";
@@ -198,7 +198,7 @@ public class TestNamespaceAuditor {
try {
ADMIN.createNamespace(nspDesc);
} catch (Exception exp) {
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
exceptionCaught = true;
} finally {
assertTrue(exceptionCaught);
@@ -211,7 +211,7 @@ public class TestNamespaceAuditor {
try {
ADMIN.createNamespace(nspDesc);
} catch (Exception exp) {
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
exceptionCaught = true;
} finally {
assertTrue(exceptionCaught);
@@ -224,7 +224,7 @@ public class TestNamespaceAuditor {
try {
ADMIN.createNamespace(nspDesc);
} catch (Exception exp) {
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
exceptionCaught = true;
} finally {
assertTrue(exceptionCaught);
@@ -237,7 +237,7 @@ public class TestNamespaceAuditor {
try {
ADMIN.createNamespace(nspDesc);
} catch (Exception exp) {
- LOG.warn(exp);
+ LOG.warn(exp.toString(), exp);
exceptionCaught = true;
} finally {
assertTrue(exceptionCaught);
@@ -416,7 +416,7 @@ public class TestNamespaceAuditor {
ADMIN.createTable(tableDescOne);
fail("Table " + tableOne.toString() + "creation should fail.");
} catch (Exception exp) {
- LOG.error(exp);
+ LOG.error(exp.toString(), exp);
}
assertFalse(ADMIN.tableExists(tableOne));
@@ -429,7 +429,7 @@ public class TestNamespaceAuditor {
ADMIN.createTable(tableDescOne);
} catch (Exception e) {
fail("Table " + tableOne.toString() + "creation should succeed.");
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
assertTrue(ADMIN.tableExists(tableOne));
nstate = getNamespaceState(nsp1);
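The exception-logging hunks above follow the same constraint: commons-logging allowed LOG.warn(exp) with the Throwable itself as the message, but SLF4J expects a String message plus an optional Throwable, so the patch passes exp.toString() as the message and the exception again as the last argument to preserve the stack trace. A minimal sketch of the two-argument form (the method and its argument are illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ThrowableLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ThrowableLoggingSketch.class);

  void runAndWarn(Runnable action) {
    try {
      action.run();
    } catch (Exception exp) {
      // Message first, Throwable last: SLF4J prints the message and the full stack trace.
      LOG.warn(exp.toString(), exp);
    }
  }
}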
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java
index 8eb2e58827..579fcd3ab0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java
@@ -23,8 +23,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.concurrent.ThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
@@ -32,13 +30,15 @@ import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.MetricsMaster;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class SimpleMasterProcedureManager extends MasterProcedureManager {
public static final String SIMPLE_SIGNATURE = "simple_test";
public static final String SIMPLE_DATA = "simple_test_data";
- private static final Log LOG = LogFactory.getLog(SimpleMasterProcedureManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleMasterProcedureManager.class);
private MasterServices master;
private ProcedureCoordinator coordinator;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java
index 7d6f80a1df..f5a858ab93 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java
@@ -29,8 +29,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.DaemonThreadFactory;
@@ -39,10 +37,12 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class SimpleRSProcedureManager extends RegionServerProcedureManager {
- private static final Log LOG = LogFactory.getLog(SimpleRSProcedureManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleRSProcedureManager.class);
private RegionServerServices rss;
private ProcedureMemberRpcs memberRpcs;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java
index 2f0b5b90ea..52a665d242 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertArrayEquals;
import java.io.IOException;
import java.util.HashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -33,11 +31,13 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureManager {
- private static final Log LOG = LogFactory.getLog(TestProcedureManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcedureManager.class);
private static final int NUM_RS = 2;
private static HBaseTestingUtility util = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
index 36ea086dfe..177abbc6ce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
@@ -34,8 +34,6 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -55,7 +53,8 @@ import org.mockito.internal.matchers.ArrayEquals;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.mockito.verification.VerificationMode;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -64,7 +63,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category({MasterTests.class, MediumTests.class})
public class TestZKProcedure {
- private static final Log LOG = LogFactory.getLog(TestZKProcedure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestZKProcedure.class);
private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final String COORDINATOR_NODE_NAME = "coordinator";
private static final long KEEP_ALIVE = 100; // seconds
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
index 5d680743c5..723ecd7ae2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
@@ -28,8 +28,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -46,7 +44,8 @@ import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.mockito.verification.VerificationMode;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -55,7 +54,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category({MasterTests.class, MediumTests.class})
public class TestZKProcedureControllers {
- private static final Log LOG = LogFactory.getLog(TestZKProcedureControllers.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestZKProcedureControllers.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final String COHORT_NODE_NAME = "expected";
private static final String CONTROLLER_NODE_NAME = "controller";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
index 86df39fe52..be21f4969b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java
@@ -28,8 +28,6 @@ import java.util.Random;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -48,14 +46,15 @@ import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
@InterfaceAudience.Private
public class SpaceQuotaHelperForTests {
- private static final Log LOG = LogFactory.getLog(SpaceQuotaHelperForTests.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SpaceQuotaHelperForTests.class);
public static final int SIZE_PER_VALUE = 256;
public static final String F1 = "f1";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierForTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierForTest.java
index 3198aa3e1e..46fb1e8908 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierForTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierForTest.java
@@ -19,10 +19,10 @@ package org.apache.hadoop.hbase.quotas;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
/**
@@ -30,7 +30,8 @@ import org.apache.hadoop.hbase.client.Connection;
*/
@InterfaceAudience.Private
public class SpaceQuotaSnapshotNotifierForTest implements SpaceQuotaSnapshotNotifier {
- private static final Log LOG = LogFactory.getLog(SpaceQuotaSnapshotNotifierForTest.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(SpaceQuotaSnapshotNotifierForTest.class);
private final Map<TableName, SpaceQuotaSnapshot> tableQuotaSnapshots = new HashMap<>();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
index 2a16739af5..788fadaa36 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
@@ -18,10 +18,9 @@
package org.apache.hadoop.hbase.quotas;
+import java.util.Objects;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -44,7 +43,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
import static org.junit.Assert.assertEquals;
@@ -57,7 +57,7 @@ import static org.junit.Assert.fail;
*/
@Category({ClientTests.class, MediumTests.class})
public class TestQuotaAdmin {
- private static final Log LOG = LogFactory.getLog(TestQuotaAdmin.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestQuotaAdmin.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -148,7 +148,7 @@ public class TestQuotaAdmin {
int countThrottle = 0;
int countGlobalBypass = 0;
for (QuotaSettings settings: scanner) {
- LOG.debug(settings);
+ LOG.debug(Objects.toString(settings));
switch (settings.getQuotaType()) {
case THROTTLE:
ThrottleSettings throttle = (ThrottleSettings)settings;
@@ -419,7 +419,7 @@ public class TestQuotaAdmin {
try {
int count = 0;
for (QuotaSettings settings: scanner) {
- LOG.debug(settings);
+ LOG.debug(Objects.toString(settings));
count++;
}
return count;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreRegionReports.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreRegionReports.java
index c57a89ff17..8451acf5ad 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreRegionReports.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreRegionReports.java
@@ -26,8 +26,6 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -50,13 +48,16 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A test case to verify that region reports are expired when they are not sent.
*/
@Category(LargeTests.class)
public class TestQuotaObserverChoreRegionReports {
- private static final Log LOG = LogFactory.getLog(TestQuotaObserverChoreRegionReports.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestQuotaObserverChoreRegionReports.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@Rule
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java
index 736be8d0f4..06a06fd4ed 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java
@@ -32,8 +32,6 @@ import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -52,7 +50,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
@@ -61,7 +60,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
*/
@Category(LargeTests.class)
public class TestQuotaObserverChoreWithMiniCluster {
- private static final Log LOG = LogFactory.getLog(TestQuotaObserverChoreWithMiniCluster.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestQuotaObserverChoreWithMiniCluster.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final AtomicLong COUNTER = new AtomicLong(0);
private static final long DEFAULT_WAIT_MILLIS = 500;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java
index a5fe4069a2..a44ad74f50 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java
@@ -26,8 +26,6 @@ import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -48,13 +46,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test class for the quota status RPCs in the master and regionserver.
*/
@Category({MediumTests.class})
public class TestQuotaStatusRPCs {
- private static final Log LOG = LogFactory.getLog(TestQuotaStatusRPCs.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestQuotaStatusRPCs.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final AtomicLong COUNTER = new AtomicLong(0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
index b1b2797ce4..dfb3484df0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
@@ -18,17 +18,15 @@
package org.apache.hadoop.hbase.quotas;
+import java.util.Objects;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -44,12 +42,14 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
@Category({RegionServerTests.class, MediumTests.class})
public class TestQuotaThrottle {
- private final static Log LOG = LogFactory.getLog(TestQuotaThrottle.class);
+ private final static Logger LOG = LoggerFactory.getLogger(TestQuotaThrottle.class);
private final static int REFRESH_TIME = 30 * 60000;
@@ -587,9 +587,9 @@ public class TestQuotaThrottle {
}
LOG.debug("QuotaCache");
- LOG.debug(quotaCache.getNamespaceQuotaCache());
- LOG.debug(quotaCache.getTableQuotaCache());
- LOG.debug(quotaCache.getUserQuotaCache());
+ LOG.debug(Objects.toString(quotaCache.getNamespaceQuotaCache()));
+ LOG.debug(Objects.toString(quotaCache.getTableQuotaCache()));
+ LOG.debug(Objects.toString(quotaCache.getUserQuotaCache()));
}
}
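
The three LOG.debug changes just above show a pattern this patch applies throughout: Commons Logging's debug(Object) accepted any value, while SLF4J's debug(String) takes a message string, so object arguments are wrapped with java.util.Objects.toString(...). The following is a minimal sketch of the resulting SLF4J usage, illustrative only and not part of the patch; the class name QuotaCacheLoggingExample and its dump method are hypothetical.

    import java.util.Map;
    import java.util.Objects;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class QuotaCacheLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(QuotaCacheLoggingExample.class);

      void dump(Map<String, Object> namespaceQuotaCache) {
        // debug(String): wrap the object, matching what the hunks above do.
        LOG.debug(Objects.toString(namespaceQuotaCache));
        // Parameterized form SLF4J also offers; the argument is only rendered
        // into the message when DEBUG is enabled.
        LOG.debug("namespace quota cache: {}", namespaceQuotaCache);
      }
    }
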
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRegionSizeUse.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRegionSizeUse.java
index 035216c6e6..021ebf2336 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRegionSizeUse.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRegionSizeUse.java
@@ -25,8 +25,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -47,13 +45,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test class which verifies that region sizes are reported to the master.
*/
@Category(MediumTests.class)
public class TestRegionSizeUse {
- private static final Log LOG = LogFactory.getLog(TestRegionSizeUse.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionSizeUse.class);
private static final int SIZE_PER_VALUE = 256;
private static final int NUM_SPLITS = 10;
private static final String F1 = "f1";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java
index 27fc2a122c..368ab12792 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java
@@ -26,8 +26,6 @@ import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
@@ -57,7 +55,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
@@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
*/
@Category(MediumTests.class)
public class TestSnapshotQuotaObserverChore {
- private static final Log LOG = LogFactory.getLog(TestSnapshotQuotaObserverChore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotQuotaObserverChore.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final AtomicLong COUNTER = new AtomicLong();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java
index e923cc611b..98186c5750 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java
@@ -29,8 +29,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -72,13 +70,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* End-to-end test class for filesystem space quotas.
*/
@Category(LargeTests.class)
public class TestSpaceQuotas {
- private static final Log LOG = LogFactory.getLog(TestSpaceQuotas.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSpaceQuotas.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
// Global for all tests in the class
private static final AtomicLong COUNTER = new AtomicLong(0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java
index 822026e64d..6ae28e6020 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java
@@ -25,8 +25,6 @@ import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
@@ -51,7 +49,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
/**
@@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
*/
@Category({LargeTests.class})
public class TestSpaceQuotasWithSnapshots {
- private static final Log LOG = LogFactory.getLog(TestSpaceQuotasWithSnapshots.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSpaceQuotasWithSnapshots.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
// Global for all tests in the class
private static final AtomicLong COUNTER = new AtomicLong(0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
index 3a60cbbd19..300268f087 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
@@ -26,8 +26,6 @@ import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -51,13 +49,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test class to verify that the HBase superuser can override quotas.
*/
@Category(MediumTests.class)
public class TestSuperUserQuotaPermissions {
- private static final Log LOG = LogFactory.getLog(TestSuperUserQuotaPermissions.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSuperUserQuotaPermissions.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
// Default to the user running the tests
private static final String SUPERUSER_NAME = System.getProperty("user.name");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
index a311501d9a..4942c228e5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
@@ -28,8 +28,6 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Creates an HFile with random key/value pairs.
@@ -55,8 +55,8 @@ public class CreateRandomStoreFile {
*/
private static final int LEN_VARIATION = 5;
- private static final Log LOG =
- LogFactory.getLog(CreateRandomStoreFile.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(CreateRandomStoreFile.class);
private static final String OUTPUT_DIR_OPTION = "o";
private static final String NUM_KV_OPTION = "n";
private static final String HFILE_VERSION_OPTION = "h";
@@ -122,7 +122,7 @@ public class CreateRandomStoreFile {
try {
cmdLine = parser.parse(options, args);
} catch (ParseException ex) {
- LOG.error(ex);
+ LOG.error(ex.toString(), ex);
return false;
}
@@ -172,12 +172,12 @@ public class CreateRandomStoreFile {
int blockSize = HConstants.DEFAULT_BLOCKSIZE;
if (cmdLine.hasOption(BLOCK_SIZE_OPTION))
blockSize = Integer.valueOf(cmdLine.getOptionValue(BLOCK_SIZE_OPTION));
-
+
if (cmdLine.hasOption(BLOOM_BLOCK_SIZE_OPTION)) {
conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,
Integer.valueOf(cmdLine.getOptionValue(BLOOM_BLOCK_SIZE_OPTION)));
}
-
+
if (cmdLine.hasOption(INDEX_BLOCK_SIZE_OPTION)) {
conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY,
Integer.valueOf(cmdLine.getOptionValue(INDEX_BLOCK_SIZE_OPTION)));
@@ -299,7 +299,7 @@ public class CreateRandomStoreFile {
if (!app.run(args))
System.exit(EXIT_FAILURE);
} catch (IOException ex) {
- LOG.error(ex);
+ LOG.error(ex.toString(), ex);
System.exit(EXIT_FAILURE);
}
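
The two LOG.error changes in this file follow from the same API difference: SLF4J has no error(Object) overload, so the message must be a String, and passing the exception as the second argument makes the backend log the stack trace. A small sketch of that error(String, Throwable) signature is below; it uses a hypothetical file-size helper rather than the option parsing above and is not part of the patch.

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ErrorLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(ErrorLoggingExample.class);

      static long sizeOf(String path) {
        try {
          return Files.size(Paths.get(path));
        } catch (IOException ex) {
          // error(String, Throwable): the String message comes first and the
          // Throwable last, so the full stack trace is written to the log.
          LOG.error(ex.toString(), ex);
          return -1L;
        }
      }
    }
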
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
index 5f858264ad..ab1501ee35 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
@@ -32,8 +32,6 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -55,13 +53,15 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests various algorithms for key compression on an existing HFile. Useful
* for testing, debugging and benchmarking.
*/
public class DataBlockEncodingTool {
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
DataBlockEncodingTool.class);
private static final boolean includesMemstoreTS = true;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
index af82692aa6..774888c642 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
@@ -27,13 +27,12 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import java.util.Objects;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -75,6 +74,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Testing of HRegion.incrementColumnValue, HRegion.increment,
@@ -82,7 +83,7 @@ import org.junit.rules.TestName;
*/
@Category({VerySlowRegionServerTests.class, MediumTests.class}) // Starts 100 threads
public class TestAtomicOperation {
- private static final Log LOG = LogFactory.getLog(TestAtomicOperation.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestAtomicOperation.class);
@Rule public TestName name = new TestName();
HRegion region = null;
@@ -107,8 +108,8 @@ public class TestAtomicOperation {
public void teardown() throws IOException {
if (region != null) {
BlockCache bc = region.getStores().get(0).getCacheConfig().getBlockCache();
- ((HRegion)region).close();
- WAL wal = ((HRegion)region).getWAL();
+ region.close();
+ WAL wal = region.getWAL();
if (wal != null) wal.close();
if (bc != null) bc.shutdown();
region = null;
@@ -428,7 +429,7 @@ public class TestAtomicOperation {
Get g = new Get(row);
Result r = region.get(g);
if (r.size() != 1) {
- LOG.debug(r);
+ LOG.debug(Objects.toString(r));
failures.incrementAndGet();
fail();
}
@@ -525,7 +526,7 @@ public class TestAtomicOperation {
;
rs.close();
if (r.size() != 1) {
- LOG.debug(r);
+ LOG.debug(Objects.toString(r));
failures.incrementAndGet();
fail();
}
@@ -627,6 +628,7 @@ public class TestAtomicOperation {
this.region = region;
}
+ @Override
public void doWork() throws Exception {
Put[] puts = new Put[1];
Put put = new Put(Bytes.toBytes("r1"));
@@ -644,6 +646,7 @@ public class TestAtomicOperation {
this.region = region;
}
+ @Override
public void doWork() throws Exception {
Put[] puts = new Put[1];
Put put = new Put(Bytes.toBytes("r1"));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
index c40b24a47a..6331e86919 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
@@ -24,8 +24,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -50,13 +48,15 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.junit.*;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertEquals;
@Category({RegionServerTests.class, MediumTests.class})
public class TestBlocksRead {
- private static final Log LOG = LogFactory.getLog(TestBlocksRead.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestBlocksRead.class);
@Rule public TestName testName = new TestName();
static final BloomType[] BLOOM_TYPE = new BloomType[] { BloomType.ROWCOL,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
index feb456efc9..02a21b6aac 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
@@ -27,8 +27,6 @@ import java.util.Collection;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -63,6 +61,8 @@ import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests {@link HFile} cache-on-write functionality for data blocks, non-root
@@ -72,7 +72,7 @@ import org.junit.runners.Parameterized.Parameters;
@Category({RegionServerTests.class, MediumTests.class})
public class TestCacheOnWriteInSchema {
- private static final Log LOG = LogFactory.getLog(TestCacheOnWriteInSchema.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCacheOnWriteInSchema.class);
@Rule public TestName name = new TestName();
private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
index 4a26f76cb0..f6cd4e29d4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
@@ -30,8 +30,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -52,6 +50,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RegionServerTests.class, MediumTests.class})
public class TestColumnSeeking {
@@ -60,7 +60,7 @@ public class TestColumnSeeking {
withLookingForStuckThread(true).build();
private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
- private static final Log LOG = LogFactory.getLog(TestColumnSeeking.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestColumnSeeking.class);
@SuppressWarnings("unchecked")
@Test
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java
index 6d06494670..4df21e0bf1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java
@@ -18,8 +18,7 @@
package org.apache.hadoop.hbase.regionserver;
import java.util.Collection;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -36,12 +35,14 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
@Category(MediumTests.class)
public class TestCompactSplitThread {
- private static final Log LOG = LogFactory.getLog(TestCompactSplitThread.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCompactSplitThread.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final TableName tableName = TableName.valueOf(getClass().getSimpleName());
private final byte[] family = Bytes.toBytes("f");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
index 0f18deeab4..6fbf99adf6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
@@ -23,8 +23,6 @@ import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
@@ -54,6 +52,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -64,7 +64,7 @@ import static org.junit.Assert.assertTrue;
@Category({RegionServerTests.class, MediumTests.class})
public class TestCompactingMemStore extends TestDefaultMemStore {
- private static final Log LOG = LogFactory.getLog(TestCompactingMemStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCompactingMemStore.class);
protected static ChunkCreator chunkCreator;
protected HRegion region;
protected RegionServicesForStores regionServicesForStores;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java
index b4cf4ec342..673acdd8c2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -38,6 +36,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
@@ -52,7 +52,8 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore
public static Object[] data() {
return new Object[] { "CHUNK_MAP", "ARRAY_MAP" }; // test different immutable indexes
}
- private static final Log LOG = LogFactory.getLog(TestCompactingToCellFlatMapMemStore.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestCompactingToCellFlatMapMemStore.class);
public final boolean toCellChunkMap;
Configuration conf;
//////////////////////////////////////////////////////////////////////////////
@@ -87,6 +88,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore
//////////////////////////////////////////////////////////////////////////////
// Compaction tests
//////////////////////////////////////////////////////////////////////////////
+ @Override
public void testCompaction1Bucket() throws IOException {
int counter = 0;
String[] keys1 = { "A", "A", "B", "C" }; //A1, A2, B3, C4
@@ -134,6 +136,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore
memstore.clearSnapshot(snapshot.getId());
}
+ @Override
public void testCompaction2Buckets() throws IOException {
if (toCellChunkMap) {
// set memstore to flat into CellChunkMap
@@ -195,6 +198,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore
memstore.clearSnapshot(snapshot.getId());
}
+ @Override
public void testCompaction3Buckets() throws IOException {
if (toCellChunkMap) {
// set memstore to flat into CellChunkMap
@@ -572,6 +576,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore
assertTrue(chunkCreator.getPoolSize() > 0);
}
+ @Override
@Test
public void testPuttingBackChunksAfterFlushing() throws IOException {
byte[] row = Bytes.toBytes("testrow");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java
index 5735e88272..112fe4de30 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java
@@ -24,8 +24,6 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -43,6 +41,8 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class tests the scenario where a store refresh happens due to a file not found during scan,
@@ -51,7 +51,7 @@ import org.junit.experimental.categories.Category;
*/
@Category(MediumTests.class)
public class TestCompactionFileNotFound {
- private static final Log LOG = LogFactory.getLog(TestCompactionFileNotFound.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCompactionFileNotFound.class);
private static final HBaseTestingUtility util = new HBaseTestingUtility();
private static final TableName TEST_TABLE = TableName.valueOf("test");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionInDeadRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionInDeadRegionServer.java
index 1b39a6d363..457f002987 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionInDeadRegionServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionInDeadRegionServer.java
@@ -24,8 +24,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -53,6 +51,8 @@ import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This testcase is used to ensure that the compaction marker will fail a compaction if the RS is
@@ -61,7 +61,7 @@ import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
@Category({ RegionServerTests.class, LargeTests.class })
public class TestCompactionInDeadRegionServer {
- private static final Log LOG = LogFactory.getLog(TestCompactionInDeadRegionServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCompactionInDeadRegionServer.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
index 1e9fd409ce..56931b2834 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -43,11 +41,12 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
public class TestCompactionPolicy {
- private final static Log LOG = LogFactory.getLog(TestCompactionPolicy.class);
+ private final static Logger LOG = LoggerFactory.getLogger(TestCompactionPolicy.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
protected Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
index b30a88459a..241d0648de 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
@@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.client.Admin;
@@ -43,11 +41,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/** Unit tests to test retrieving table/region compaction state*/
@Category({VerySlowRegionServerTests.class, LargeTests.class})
public class TestCompactionState {
- private static final Log LOG = LogFactory.getLog(TestCompactionState.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCompactionState.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static Random random = new Random();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index 58c76ca141..12763f9fff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
@@ -32,8 +32,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -60,6 +58,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests writing Bloom filter blocks in the same part of the file as data
@@ -71,7 +71,7 @@ public class TestCompoundBloomFilter {
private static final HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
TestCompoundBloomFilter.class);
private static final int NUM_TESTS = 9;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
index 0a0bf5d720..d4f60859de 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
@@ -27,11 +27,10 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import java.util.Objects;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -67,7 +66,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -75,7 +75,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/** memstore test case */
@Category({RegionServerTests.class, MediumTests.class})
public class TestDefaultMemStore {
- private static final Log LOG = LogFactory.getLog(TestDefaultMemStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDefaultMemStore.class);
@Rule public TestName name = new TestName();
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
@@ -167,7 +167,7 @@ public class TestDefaultMemStore {
int count = 0;
try (StoreScanner s = new StoreScanner(scan, scanInfo, null, memstorescanners)) {
while (s.next(result)) {
- LOG.info(result);
+ LOG.info(Objects.toString(result));
count++;
// Row count is same as column count.
assertEquals(rowCount, result.size());
@@ -184,7 +184,7 @@ public class TestDefaultMemStore {
count = 0;
try (StoreScanner s = new StoreScanner(scan, scanInfo, null, memstorescanners)) {
while (s.next(result)) {
- LOG.info(result);
+ LOG.info(Objects.toString(result));
// Assert the stuff is coming out in right order.
assertTrue(CellUtil.matchingRows(result.get(0), Bytes.toBytes(count)));
count++;
@@ -208,7 +208,7 @@ public class TestDefaultMemStore {
int snapshotIndex = 5;
try (StoreScanner s = new StoreScanner(scan, scanInfo, null, memstorescanners)) {
while (s.next(result)) {
- LOG.info(result);
+ LOG.info(Objects.toString(result));
// Assert the stuff is coming out in right order.
assertTrue(CellUtil.matchingRows(result.get(0), Bytes.toBytes(count)));
// Row count is same as column count.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
index b2ef2f7f8b..ec714be7ae 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
@@ -29,8 +29,6 @@ import java.util.List;
import javax.crypto.spec.SecretKeySpec;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -57,10 +55,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RegionServerTests.class, MediumTests.class})
public class TestEncryptionKeyRotation {
- private static final Log LOG = LogFactory.getLog(TestEncryptionKeyRotation.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestEncryptionKeyRotation.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final Configuration conf = TEST_UTIL.getConfiguration();
private static final Key initialCFKey;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
index fa4d7f0672..e5b8a6146a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
@@ -30,8 +30,6 @@ import java.util.TreeSet;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -63,12 +61,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterators;
@Category(LargeTests.class)
public class TestEndToEndSplitTransaction {
- private static final Log LOG = LogFactory.getLog(TestEndToEndSplitTransaction.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestEndToEndSplitTransaction.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final Configuration CONF = TEST_UTIL.getConfiguration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
index 48081bdbb2..bd66bde3da 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
@@ -29,8 +29,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
@@ -58,6 +56,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test cases that ensure that file system level errors are bubbled up
@@ -65,7 +65,7 @@ import org.junit.rules.TestName;
*/
@Category({RegionServerTests.class, MediumTests.class})
public class TestFSErrorsExposed {
- private static final Log LOG = LogFactory.getLog(TestFSErrorsExposed.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFSErrorsExposed.class);
HBaseTestingUtility util = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java
index 015894901c..84e424eb84 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java
@@ -27,8 +27,6 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -55,6 +53,8 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
import org.mockito.exceptions.verification.WantedButNotInvoked;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Testing sync/append failures.
@@ -62,7 +62,7 @@ import org.mockito.exceptions.verification.WantedButNotInvoked;
*/
@Category({MediumTests.class})
public class TestFailedAppendAndSync {
- private static final Log LOG = LogFactory.getLog(TestFailedAppendAndSync.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFailedAppendAndSync.class);
@Rule public TestName name = new TestName();
private static final String COLUMN_FAMILY = "MyCF";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
index 9c87ebe43f..642b47108f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
@@ -21,9 +21,8 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Objects;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -49,6 +48,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@@ -60,7 +61,7 @@ import static org.junit.Assert.assertTrue;
@Category({RegionServerTests.class, MediumTests.class})
public class TestGetClosestAtOrBefore {
@Rule public TestName testName = new TestName();
- private static final Log LOG = LogFactory.getLog(TestGetClosestAtOrBefore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestGetClosestAtOrBefore.class);
private static final byte[] T00 = Bytes.toBytes("000");
private static final byte[] T10 = Bytes.toBytes("010");
@@ -104,8 +105,8 @@ public class TestGetClosestAtOrBefore {
InternalScanner s = mr.getScanner(new Scan());
try {
List keys = new ArrayList<>();
- while (s.next(keys)) {
- LOG.info(keys);
+ while (s.next(keys)) {
+ LOG.info(Objects.toString(keys));
keys.clear();
}
} finally {
@@ -293,8 +294,8 @@ public class TestGetClosestAtOrBefore {
} finally {
if (region != null) {
try {
- WAL wal = ((HRegion)region).getWAL();
- ((HRegion)region).close();
+ WAL wal = region.getWAL();
+ region.close();
wal.close();
} catch (Exception e) {
e.printStackTrace();
@@ -351,8 +352,8 @@ public class TestGetClosestAtOrBefore {
} finally {
if (region != null) {
try {
- WAL wal = ((HRegion)region).getWAL();
- ((HRegion)region).close();
+ WAL wal = region.getWAL();
+ region.close();
wal.close();
} catch (Exception e) {
e.printStackTrace();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
index 95efa80625..32e3856f0a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
@@ -35,8 +35,6 @@ import java.util.concurrent.ConcurrentSkipListSet;
import javax.crypto.spec.SecretKeySpec;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -78,10 +76,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(MediumTests.class)
public class TestHMobStore {
- public static final Log LOG = LogFactory.getLog(TestHMobStore.class);
+ public static final Logger LOG = LoggerFactory.getLogger(TestHMobStore.class);
@Rule public TestName name = new TestName();
private HMobStore store;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 3482955b2a..0098091699 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -51,6 +51,7 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
+import java.util.Objects;
import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.Callable;
@@ -64,8 +65,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -177,7 +176,8 @@ import org.mockito.ArgumentMatcher;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -199,7 +199,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescript
public class TestHRegion {
// Do not spin up clusters in here. If you need to spin up a cluster, do it
// over in TestHRegionOnCluster.
- private static final Log LOG = LogFactory.getLog(TestHRegion.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHRegion.class);
@Rule
public TestName name = new TestName();
@ClassRule
@@ -927,7 +927,7 @@ public class TestHRegion {
// now check whether we have only one store file, the compacted one
Collection sfs = region.getStore(family).getStorefiles();
for (HStoreFile sf : sfs) {
- LOG.info(sf.getPath());
+ LOG.info(Objects.toString(sf.getPath()));
}
if (!mismatchedRegionName) {
assertEquals(1, region.getStore(family).getStorefilesCount());
@@ -1081,7 +1081,7 @@ public class TestHRegion {
try {
desc = WALEdit.getFlushDescriptor(cells.get(0));
} catch (IOException e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
return false;
}
if (desc != null) {
@@ -2143,8 +2143,8 @@ public class TestHRegion {
byte[] value = Bytes.toBytes("value");
Put put = new Put(row1);
- put.addColumn(fam1, qual, (long) 1, value);
- put.addColumn(fam1, qual, (long) 2, value);
+ put.addColumn(fam1, qual, 1, value);
+ put.addColumn(fam1, qual, 2, value);
this.region = initHRegion(tableName, method, CONF, fam1);
try {
@@ -2591,7 +2591,7 @@ public class TestHRegion {
// extract the key values out the memstore:
// This is kinda hacky, but better than nothing...
long now = System.currentTimeMillis();
- AbstractMemStore memstore = (AbstractMemStore)((HStore) region.getStore(fam1)).memstore;
+ AbstractMemStore memstore = (AbstractMemStore)region.getStore(fam1).memstore;
Cell firstCell = memstore.getActive().first();
assertTrue(firstCell.getTimestamp() <= now);
now = firstCell.getTimestamp();
@@ -3879,7 +3879,7 @@ public class TestHRegion {
byte[] value = Bytes.toBytes(String.valueOf(numPutsFinished));
for (byte[] family : families) {
for (byte[] qualifier : qualifiers) {
- put.addColumn(family, qualifier, (long) numPutsFinished, value);
+ put.addColumn(family, qualifier, numPutsFinished, value);
}
}
region.put(put);
@@ -4115,7 +4115,7 @@ public class TestHRegion {
region.flush(true);
}
// before compaction
- HStore store = (HStore) region.getStore(fam1);
+ HStore store = region.getStore(fam1);
Collection storeFiles = store.getStorefiles();
for (HStoreFile storefile : storeFiles) {
StoreFileReader reader = storefile.getReader();
@@ -4208,7 +4208,7 @@ public class TestHRegion {
byte col[] = Bytes.toBytes("col1");
Put put = new Put(row);
- put.addColumn(familyName, col, (long) 1, Bytes.toBytes("SomeRandomValue"));
+ put.addColumn(familyName, col, 1, Bytes.toBytes("SomeRandomValue"));
region.put(put);
region.flush(true);
@@ -4255,8 +4255,8 @@ public class TestHRegion {
byte col[] = Bytes.toBytes("col1");
Put put = new Put(row);
- put.addColumn(fam1, col, (long) 1, Bytes.toBytes("test1"));
- put.addColumn(fam2, col, (long) 1, Bytes.toBytes("test2"));
+ put.addColumn(fam1, col, 1, Bytes.toBytes("test1"));
+ put.addColumn(fam2, col, 1, Bytes.toBytes("test2"));
ht.put(put);
HRegion firstRegion = htu.getHBaseCluster().getRegions(tableName).get(0);
@@ -6430,7 +6430,7 @@ public class TestHRegion {
byte[] value = Bytes.toBytes(String.valueOf(count));
for (byte[] family : families) {
for (byte[] qualifier : qualifiers) {
- put.addColumn(family, qualifier, (long) count, value);
+ put.addColumn(family, qualifier, count, value);
}
}
try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java
index 829b4884b5..0514005b60 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java
@@ -30,8 +30,6 @@ import java.net.URI;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -57,11 +55,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RegionServerTests.class, SmallTests.class})
public class TestHRegionFileSystem {
private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestHRegionFileSystem.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHRegionFileSystem.class);
public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
private static final byte[][] FAMILIES = {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java
index 41ad68bf5b..9b89768dbf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java
@@ -25,8 +25,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -48,6 +46,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -58,7 +58,7 @@ import org.junit.rules.TestName;
@Category({RegionServerTests.class, MediumTests.class})
public class TestHRegionOnCluster {
- private static final Log LOG = LogFactory.getLog(TestHRegionOnCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHRegionOnCluster.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@Rule
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
index 8b5bf254af..55b5e643f6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
@@ -40,11 +40,10 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.Random;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
@@ -78,7 +77,6 @@ import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALFactory;
-import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;
import org.apache.hadoop.util.StringUtils;
@@ -89,6 +87,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -109,7 +109,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescript
@Category(MediumTests.class)
public class TestHRegionReplayEvents {
- private static final Log LOG = LogFactory.getLog(TestHRegion.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHRegion.class);
@Rule public TestName name = new TestName();
private static HBaseTestingUtility TEST_UTIL;
@@ -1208,15 +1208,13 @@ public class TestHRegionReplayEvents {
@Test
public void testWriteFlushRequestMarker() throws IOException {
// primary region is empty at this point. Request a flush with writeFlushRequestWalMarker=false
- FlushResultImpl result = (FlushResultImpl) ((HRegion) primaryRegion).flushcache(true, false,
- FlushLifeCycleTracker.DUMMY);
+ FlushResultImpl result = primaryRegion.flushcache(true, false, FlushLifeCycleTracker.DUMMY);
assertNotNull(result);
assertEquals(result.result, FlushResultImpl.Result.CANNOT_FLUSH_MEMSTORE_EMPTY);
assertFalse(result.wroteFlushWalMarker);
// request flush again, but this time with writeFlushRequestWalMarker = true
- result = (FlushResultImpl) ((HRegion) primaryRegion).flushcache(true, true,
- FlushLifeCycleTracker.DUMMY);
+ result = primaryRegion.flushcache(true, true, FlushLifeCycleTracker.DUMMY);
assertNotNull(result);
assertEquals(result.result, FlushResultImpl.Result.CANNOT_FLUSH_MEMSTORE_EMPTY);
assertTrue(result.wroteFlushWalMarker);
@@ -1294,7 +1292,7 @@ public class TestHRegionReplayEvents {
reader = createWALReaderForPrimary();
while (true) {
WAL.Entry entry = reader.next();
- LOG.info(entry);
+ LOG.info(Objects.toString(entry));
if (entry == null) {
break;
}
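
The LOG.info(Objects.toString(entry)) change above is needed because commons-logging accepts any Object while the SLF4J message parameter is a String; Objects.toString also tolerates the null that ends the read loop. A small sketch of the equivalent idioms, with the WAL.Entry stubbed by a plain Object as an assumption for illustration:

import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class NullSafeLogging {
  private static final Logger LOG = LoggerFactory.getLogger(NullSafeLogging.class);

  public static void main(String[] args) {
    Object entry = null;                      // stands in for a WAL.Entry that may be null
    // LOG.info(entry) no longer compiles against SLF4J; both lines below are null-safe:
    LOG.info(Objects.toString(entry));        // logs the literal string "null"
    LOG.info("{}", entry);                    // parameterized form, also renders "null"
  }
}
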
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
index 723b5706df..831d46bece 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
@@ -30,8 +30,6 @@ import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -83,7 +81,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
@@ -96,7 +95,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegi
@RunWith(Parameterized.class)
@Category({RegionServerTests.class, LargeTests.class})
public class TestHRegionServerBulkLoad {
- private static final Log LOG = LogFactory.getLog(TestHRegionServerBulkLoad.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHRegionServerBulkLoad.class);
protected static HBaseTestingUtility UTIL = new HBaseTestingUtility();
protected final static Configuration conf = UTIL.getConfiguration();
protected final static byte[] QUAL = Bytes.toBytes("qual");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java
index da4b740de8..7d652aa6fa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
@@ -46,7 +44,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -56,12 +55,13 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@RunWith(Parameterized.class)
@Category({RegionServerTests.class, LargeTests.class})
public class TestHRegionServerBulkLoadWithOldClient extends TestHRegionServerBulkLoad {
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestHRegionServerBulkLoadWithOldClient.class);
+
public TestHRegionServerBulkLoadWithOldClient(int duration) {
super(duration);
}
- private static final Log LOG = LogFactory.getLog(TestHRegionServerBulkLoadWithOldClient.class);
-
public static class AtomicHFileLoader extends RepeatingTestThread {
final AtomicLong numBulkLoads = new AtomicLong();
final AtomicLong numCompactions = new AtomicLong();
@@ -73,6 +73,7 @@ public class TestHRegionServerBulkLoadWithOldClient extends TestHRegionServerBul
this.tableName = tableName;
}
+ @Override
public void doAnAction() throws Exception {
long iteration = numBulkLoads.getAndIncrement();
Path dir = UTIL.getDataTestDirOnTestFS(String.format("bulkLoad_%08d",
@@ -134,6 +135,7 @@ public class TestHRegionServerBulkLoadWithOldClient extends TestHRegionServerBul
}
}
+ @Override
void runAtomicBulkloadTest(TableName tableName, int millisToRun, int numScanners)
throws Exception {
setupTable(tableName, 10);
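
The added @Override annotations above are a side cleanup: they make the compiler verify that doAnAction() and runAtomicBulkloadTest() really override the superclass methods. A hypothetical base/subclass pair, only to illustrate the annotation's value (names are not from the patch):

abstract class RepeatingAction {
  abstract void doAnAction() throws Exception;
}

class BulkLoadAction extends RepeatingAction {
  @Override                       // compiler now rejects any accidental signature drift
  void doAnAction() throws Exception {
    System.out.println("one bulk load iteration");
  }

  public static void main(String[] args) throws Exception {
    new BulkLoadAction().doAnAction();
  }
}
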
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java
index 5d432c9d37..ac10a35292 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java
@@ -19,8 +19,7 @@
package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -31,6 +30,8 @@ import org.apache.hadoop.hbase.wal.WAL;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A test similar to TestHRegion, but with in-memory flush families.
@@ -41,7 +42,7 @@ import org.junit.rules.TestRule;
public class TestHRegionWithInMemoryFlush extends TestHRegion{
// Do not spin up clusters in here. If you need to spin up a cluster, do it
// over in TestHRegionOnCluster.
- private static final Log LOG = LogFactory.getLog(TestHRegionWithInMemoryFlush.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHRegionWithInMemoryFlush.class);
@ClassRule
public static final TestRule timeout =
CategoryBasedTimeout.forClass(TestHRegionWithInMemoryFlush.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
index 6195848392..2745d3d787 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
@@ -48,8 +48,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -112,7 +110,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -120,7 +119,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
@Category({ RegionServerTests.class, MediumTests.class })
public class TestHStore {
- private static final Log LOG = LogFactory.getLog(TestHStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHStore.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
index 4715e9570f..c9affaffc2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
@@ -30,8 +30,6 @@ import java.util.OptionalLong;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -70,7 +68,8 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -83,7 +82,7 @@ import static org.mockito.Mockito.when;
*/
@Category({RegionServerTests.class, SmallTests.class})
public class TestHStoreFile extends HBaseTestCase {
- private static final Log LOG = LogFactory.getLog(TestHStoreFile.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHStoreFile.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
private static String ROOT_DIR = TEST_UTIL.getDataTestDir("TestStoreFile").toString();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
index 60ed50b49a..d218389c89 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
@@ -29,8 +29,6 @@ import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -51,6 +49,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test performance improvement of joined scanners optimization:
@@ -58,7 +58,7 @@ import org.junit.rules.TestName;
*/
@Category({RegionServerTests.class, LargeTests.class})
public class TestJoinedScanners {
- private static final Log LOG = LogFactory.getLog(TestJoinedScanners.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestJoinedScanners.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final String DIR = TEST_UTIL.getDataTestDir("TestJoinedScanners").toString();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
index 40f513562d..2684bdf90c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
@@ -35,8 +35,6 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -69,6 +67,8 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test major compactions
@@ -81,7 +81,7 @@ public class TestMajorCompaction {
return new Object[] { "NONE", "BASIC", "EAGER" };
}
@Rule public TestName name;
- private static final Log LOG = LogFactory.getLog(TestMajorCompaction.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(TestMajorCompaction.class.getName());
private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU();
protected Configuration conf = UTIL.getConfiguration();
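
This file keeps the older getName() call, which is harmless: LoggerFactory.getLogger(Class) and getLogger(String) resolve to the same named logger. A minimal sketch demonstrating the equivalence, assuming slf4j-api on the classpath:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerNameEquivalence {
  public static void main(String[] args) {
    Logger byClass = LoggerFactory.getLogger(LoggerNameEquivalence.class);
    Logger byName = LoggerFactory.getLogger(LoggerNameEquivalence.class.getName());
    // Both loggers carry the same fully qualified name, so this prints true.
    System.out.println(byClass.getName().equals(byName.getName()));
  }
}
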
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java
index f1b4441a5f..e4b37541ae 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue;
import java.util.concurrent.Semaphore;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
@@ -39,11 +37,13 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.Rule;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, MediumTests.class})
public class TestMasterAddressTracker {
- private static final Log LOG = LogFactory.getLog(TestMasterAddressTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterAddressTracker.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -157,7 +157,7 @@ public class TestMasterAddressTracker {
}
public static class NodeCreationListener extends ZKListener {
- private static final Log LOG = LogFactory.getLog(NodeCreationListener.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NodeCreationListener.class);
private Semaphore lock;
private String node;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
index a7b5cd5a00..98b0761a62 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
@@ -24,8 +24,6 @@ import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -45,6 +43,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test minor compactions
@@ -52,7 +52,7 @@ import org.junit.rules.TestName;
@Category({RegionServerTests.class, MediumTests.class})
public class TestMinorCompaction {
@Rule public TestName name = new TestName();
- private static final Log LOG = LogFactory.getLog(TestMinorCompaction.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(TestMinorCompaction.class.getName());
private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU();
protected Configuration conf = UTIL.getConfiguration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
index 3706f4e6bd..c6a300fe11 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
@@ -33,8 +33,6 @@ import java.util.Map;
import java.util.Random;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -73,6 +71,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test mob store compaction
@@ -81,7 +81,7 @@ import org.junit.rules.TestName;
public class TestMobStoreCompaction {
@Rule
public TestName name = new TestName();
- static final Log LOG = LogFactory.getLog(TestMobStoreCompaction.class.getName());
+ static final Logger LOG = LoggerFactory.getLogger(TestMobStoreCompaction.class.getName());
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private Configuration conf = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
index bd7639ecd5..19685ea395 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
@@ -35,8 +35,6 @@ import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
@@ -58,6 +56,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests optimized scanning of multiple columns.
@@ -66,7 +66,7 @@ import org.junit.runners.Parameterized.Parameters;
@Category({RegionServerTests.class, MediumTests.class})
public class TestMultiColumnScanner {
- private static final Log LOG = LogFactory.getLog(TestMultiColumnScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMultiColumnScanner.class);
private static final String TABLE_NAME =
TestMultiColumnScanner.class.getSimpleName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java
index 6bbb81dda0..ec161dc37e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -49,6 +47,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -57,7 +57,7 @@ import org.junit.rules.TestName;
*/
@Category({RegionServerTests.class, MediumTests.class})
public class TestParallelPut {
- private static final Log LOG = LogFactory.getLog(TestParallelPut.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestParallelPut.class);
@Rule public TestName name = new TestName();
private HRegion region = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java
index b8155e4571..cf1c104105 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java
@@ -17,9 +17,16 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.hadoop.hbase.shaded.com.google.common.hash.Hashing;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Random;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -48,23 +55,17 @@ import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Random;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import org.apache.hadoop.hbase.shaded.com.google.common.hash.Hashing;
/**
* This test verifies the correctness of the Per Column Family flushing strategy
*/
@Category({ RegionServerTests.class, LargeTests.class })
public class TestPerColumnFamilyFlush {
- private static final Log LOG = LogFactory.getLog(TestPerColumnFamilyFlush.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestPerColumnFamilyFlush.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenInitializing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenInitializing.java
index 1b2574e7a5..b4d04fcd8d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenInitializing.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenInitializing.java
@@ -27,11 +27,8 @@ import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
-import org.apache.hadoop.hbase.CoordinatedStateManager;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.LocalHBaseCluster;
@@ -53,7 +50,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse;
/**
@@ -62,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
*/
@Category({RegionServerTests.class, MediumTests.class})
@Ignore("See HBASE-19515") public class TestRSKilledWhenInitializing {
- private static final Log LOG = LogFactory.getLog(TestRSKilledWhenInitializing.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRSKilledWhenInitializing.class);
@Rule public TestName testName = new TestName();
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java
index c9cae7ee43..412018f405 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.io.StringWriter;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -47,7 +45,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
@@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerIn
*/
@Category({RegionServerTests.class, SmallTests.class})
public class TestRSStatusServlet {
- private static final Log LOG = LogFactory.getLog(TestRSStatusServlet.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRSStatusServlet.class);
private HRegionServer rs;
private RSRpcServices rpcServices;
private RpcServerInterface rpcServer;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java
index 2c9a4375ed..97df08afb6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -54,6 +52,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests around replay of recovered.edits content.
@@ -61,7 +61,7 @@ import org.junit.rules.TestName;
@Category({MediumTests.class})
public class TestRecoveredEdits {
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestRecoveredEdits.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRecoveredEdits.class);
@Rule public TestName testName = new TestName();
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionIncrement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionIncrement.java
index 394b8a2e6a..d913c67815 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionIncrement.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionIncrement.java
@@ -24,8 +24,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -47,6 +45,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -60,7 +60,7 @@ import org.junit.rules.TestRule;
*/
@Category(MediumTests.class)
public class TestRegionIncrement {
- private static final Log LOG = LogFactory.getLog(TestRegionIncrement.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionIncrement.class);
@Rule public TestName name = new TestName();
@Rule public final TestRule timeout =
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
index ede9764fd8..0fba4e1363 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
@@ -26,13 +26,12 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.lang3.RandomUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -77,7 +76,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
@@ -87,7 +87,9 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
@Category({RegionServerTests.class, LargeTests.class})
public class TestRegionMergeTransactionOnCluster {
- private static final Log LOG = LogFactory.getLog(TestRegionMergeTransactionOnCluster.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestRegionMergeTransactionOnCluster.class);
+
@Rule public TestName name = new TestName();
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).
@@ -426,8 +428,8 @@ public class TestRegionMergeTransactionOnCluster {
MetaTableAccessor.getTableRegionsAndLocations(TEST_UTIL.getConnection(), tablename);
tableRegionsInMaster =
master.getAssignmentManager().getRegionStates().getRegionsOfTable(tablename);
- LOG.info(tableRegionsInMaster);
- LOG.info(tableRegionsInMeta);
+ LOG.info(Objects.toString(tableRegionsInMaster));
+ LOG.info(Objects.toString(tableRegionsInMeta));
int tableRegionsInMetaSize = tableRegionsInMeta.size();
int tableRegionsInMasterSize = tableRegionsInMaster.size();
if (tableRegionsInMetaSize == expectedRegionNum
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java
index 1f8d16be1a..fb3607a162 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java
@@ -26,8 +26,6 @@ import java.io.IOException;
import java.util.List;
import java.util.concurrent.ThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -51,12 +49,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import static org.junit.Assert.fail;
@Category({MediumTests.class, RegionServerTests.class})
public class TestRegionOpen {
@SuppressWarnings("unused")
- private static final Log LOG = LogFactory.getLog(TestRegionOpen.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionOpen.class);
private static final int NB_SERVERS = 1;
private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java
index 7a6e2fb8a5..3889e55a27 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java
@@ -26,9 +26,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -40,7 +37,6 @@ import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Consistency;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.RpcRetryingCallerImpl;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.replication.regionserver.TestRegionReplicaReplicationEndpoint;
import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -48,25 +44,22 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests failover of secondary region replicas.
*/
@Category(LargeTests.class)
public class TestRegionReplicaFailover {
-
- private static final Log LOG = LogFactory.getLog(TestRegionReplicaReplicationEndpoint.class);
-
- static {
- ((Log4JLogger)RpcRetryingCallerImpl.LOG).getLogger().setLevel(Level.ALL);
- }
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestRegionReplicaReplicationEndpoint.class);
private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
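
The static block is dropped above because the cast to commons-logging's Log4JLogger no longer applies once LOG is an SLF4J Logger, and SLF4J itself exposes no setLevel. If a test still needed that verbosity it would have to address the backing framework directly. A hedged sketch, assuming log4j 1.x is the binding on the test classpath; the logger name is an example, not taken from the patch:

import org.apache.log4j.Level;
import org.apache.log4j.LogManager;

public class RaiseLogLevel {
  public static void main(String[] args) {
    // Works only when log4j 1.x backs SLF4J; bypasses the facade entirely.
    LogManager.getLogger("org.apache.hadoop.hbase.client.RpcRetryingCallerImpl")
        .setLevel(Level.ALL);
  }
}
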
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
index 41bf11e249..276eee9c69 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
@@ -30,8 +30,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -57,7 +55,8 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
@@ -68,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
*/
@Category({RegionServerTests.class, MediumTests.class})
public class TestRegionReplicas {
- private static final Log LOG = LogFactory.getLog(TestRegionReplicas.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionReplicas.class);
private static final int NB_SERVERS = 1;
private static Table table;
@@ -251,7 +250,7 @@ public class TestRegionReplicas {
LOG.info("Flushing primary region");
HRegion region = getRS().getRegionByEncodedName(hriPrimary.getEncodedName());
region.flush(true);
- HRegion primaryRegion = (HRegion) region;
+ HRegion primaryRegion = region;
// ensure that chore is run
LOG.info("Sleeping for " + (4 * refreshPeriod));
@@ -347,7 +346,7 @@ public class TestRegionReplicas {
if (key == endKey) key = startKey;
}
} catch (Exception ex) {
- LOG.warn(ex);
+ LOG.warn(ex.toString(), ex);
exceptions[0].compareAndSet(null, ex);
}
}
@@ -367,7 +366,7 @@ public class TestRegionReplicas {
}
}
} catch (Exception ex) {
- LOG.warn(ex);
+ LOG.warn(ex.toString(), ex);
exceptions[1].compareAndSet(null, ex);
}
}
@@ -477,7 +476,7 @@ public class TestRegionReplicas {
int sum = 0;
for (HStoreFile sf : ((HStore) secondaryRegion.getStore(f)).getStorefiles()) {
// Our file does not exist anymore. was moved by the compaction above.
- LOG.debug(getRS().getFileSystem().exists(sf.getPath()));
+ LOG.debug(Boolean.toString(getRS().getFileSystem().exists(sf.getPath())));
Assert.assertFalse(getRS().getFileSystem().exists(sf.getPath()));
HFileScanner scanner = sf.getReader().getScanner(false, false);
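
Two recurring call-site fixes appear in this file: Log.warn(Object) becomes the two-argument warn(String, Throwable), which keeps the stack trace, and Log.debug(Object) on a boolean becomes an explicit String. A short sketch of both idioms, with a parameterized alternative for the boolean case shown as an option rather than what the patch does:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExceptionLogging {
  private static final Logger LOG = LoggerFactory.getLogger(ExceptionLogging.class);

  public static void main(String[] args) {
    boolean exists = false;                       // stands in for fs.exists(path)
    LOG.debug(Boolean.toString(exists));          // explicit String, as in the patch
    LOG.debug("file still present: {}", exists);  // parameterized alternative

    try {
      throw new IllegalStateException("scan failed");
    } catch (Exception ex) {
      // warn(String, Throwable): message plus full stack trace, matching the old Log.warn(ex) intent.
      LOG.warn(ex.toString(), ex);
    }
  }
}
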
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java
index d6b7e6f424..22e9dd4d67 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java
@@ -25,8 +25,6 @@ import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -43,11 +41,13 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ RegionServerTests.class, MediumTests.class })
public class TestRegionReplicasAreDistributed {
- private static final Log LOG = LogFactory.getLog(TestRegionReplicasAreDistributed.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionReplicasAreDistributed.class);
private static final int NB_SERVERS = 3;
private static Table table;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java
index 45536f2b0b..d1bf773655 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -43,9 +41,6 @@ import org.junit.rules.TestName;
@Category({ RegionServerTests.class, MediumTests.class })
public class TestRegionReplicasWithModifyTable {
-
- private static final Log LOG = LogFactory.getLog(TestRegionReplicasWithModifyTable.class);
-
private static final int NB_SERVERS = 3;
private static Table table;
private static final byte[] row = "TestRegionReplicasWithModifyTable".getBytes();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithRestartScenarios.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithRestartScenarios.java
index 1646a66a04..dd09359642 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithRestartScenarios.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithRestartScenarios.java
@@ -25,8 +25,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -50,10 +48,14 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RegionServerTests.class, MediumTests.class})
public class TestRegionReplicasWithRestartScenarios {
- private static final Log LOG = LogFactory.getLog(TestRegionReplicasWithRestartScenarios.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestRegionReplicasWithRestartScenarios.class);
+
@Rule public TestName name = new TestName();
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java
index 39339aa2ef..301f509de6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -62,6 +60,8 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Optional;
@@ -77,7 +77,7 @@ import static org.junit.Assert.assertTrue;
public class TestRegionServerAbort {
private static final byte[] FAMILY_BYTES = Bytes.toBytes("f");
- private static final Log LOG = LogFactory.getLog(TestRegionServerAbort.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionServerAbort.class);
private HBaseTestingUtility testUtil;
private Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java
index abcc497057..e6f3a24ee6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java
@@ -27,8 +27,6 @@ import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -41,13 +39,15 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests for the hostname specification by region server
*/
@Category({RegionServerTests.class, MediumTests.class})
public class TestRegionServerHostname {
- private static final Log LOG = LogFactory.getLog(TestRegionServerHostname.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionServerHostname.class);
private HBaseTestingUtility TEST_UTIL;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index 6b2d4a1fbe..15e35a5457 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -25,8 +25,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.CompatibilityFactory;
@@ -61,8 +59,6 @@ import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -74,10 +70,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RegionServerTests.class, LargeTests.class})
public class TestRegionServerMetrics {
- private static final Log LOG = LogFactory.getLog(TestRegionServerMetrics.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionServerMetrics.class);
@Rule
public TestName testName = new TestName();
@@ -85,10 +83,6 @@ public class TestRegionServerMetrics {
@ClassRule
public static TestRule timeout = CategoryBasedTimeout.forClass(TestRegionServerMetrics.class);
- static {
- Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.DEBUG);
- }
-
private static MetricsAssertHelper metricsHelper;
private static MiniHBaseCluster cluster;
private static HRegionServer rs;
@@ -536,7 +530,7 @@ public class TestRegionServerMetrics {
setMobThreshold(region, cf, 0);
// closing the region forces the compaction.discharger to archive the compacted hfiles
- ((HRegion) region).close();
+ region.close();
// metrics are reset by the region initialization
region.initialize();
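
Here the removal also sidesteps a name clash: once org.slf4j.Logger is imported, the unqualified Logger in the old static block would no longer resolve to org.apache.log4j.Logger. Had such a block been kept, the log4j type would need full qualification. A hedged sketch of that coexistence, assuming log4j 1.x remains on the classpath:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MixedLoggerImports {
  // "Logger" now means org.slf4j.Logger throughout the file.
  private static final Logger LOG = LoggerFactory.getLogger(MixedLoggerImports.class);

  static {
    // The log4j 1.x type must be fully qualified to coexist with the slf4j import.
    org.apache.log4j.Logger.getLogger("org.apache.hadoop.hbase")
        .setLevel(org.apache.log4j.Level.DEBUG);
  }

  public static void main(String[] args) {
    LOG.info("debug level raised for the org.apache.hadoop.hbase hierarchy");
  }
}
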
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
index 6e20612b7e..6c98883474 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -50,6 +48,8 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
@@ -58,7 +58,7 @@ import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, MediumTests.class})
public class TestRegionServerNoMaster {
- private static final Log LOG = LogFactory.getLog(TestRegionServerNoMaster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionServerNoMaster.class);
private static final int NB_SERVERS = 1;
private static Table table;
private static final byte[] row = "ee".getBytes();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java
index 978c792984..13da541bcd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -38,6 +36,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
@@ -49,8 +49,8 @@ import java.io.IOException;
@Category({MediumTests.class})
public class TestRegionServerOnlineConfigChange {
- private static final Log LOG =
- LogFactory.getLog(TestRegionServerOnlineConfigChange.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestRegionServerOnlineConfigChange.class.getName());
private static HBaseTestingUtility hbaseTestingUtility = new HBaseTestingUtility();
private static Configuration conf = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java
index e554d0dff5..4ff96557d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ClusterStatus.Option;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -56,6 +54,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Collection;
@@ -71,7 +71,8 @@ import static org.junit.Assert.fail;
@Category(MediumTests.class)
public class TestRegionServerReadRequestMetrics {
- private static final Log LOG = LogFactory.getLog(TestRegionServerReadRequestMetrics.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestRegionServerReadRequestMetrics.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final TableName TABLE_NAME = TableName.valueOf("test");
private static final byte[] CF1 = "c1".getBytes();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
index f32a87c72a..b72f486355 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoordinatedStateManager;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -41,11 +39,13 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(MediumTests.class)
public class TestRegionServerReportForDuty {
- private static final Log LOG = LogFactory.getLog(TestRegionServerReportForDuty.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionServerReportForDuty.class);
private static final long SLEEP_INTERVAL = 500;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
index a061a8793e..99296f4835 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
@@ -29,8 +29,6 @@ import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -65,7 +63,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -73,7 +72,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
*/
@Category({RegionServerTests.class, MediumTests.class})
public class TestReversibleScanners {
- private static final Log LOG = LogFactory.getLog(TestReversibleScanners.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReversibleScanners.class);
HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte[] FAMILYNAME = Bytes.toBytes("testCf");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
index 5c716f75ba..00e573ff55 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
@@ -31,8 +31,6 @@ import java.util.List;
import java.util.NavigableSet;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -56,6 +54,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test a multi-column scanner when there is a Bloom filter false-positive.
@@ -65,8 +65,8 @@ import org.junit.runners.Parameterized.Parameters;
@Category({RegionServerTests.class, SmallTests.class})
public class TestScanWithBloomError {
- private static final Log LOG =
- LogFactory.getLog(TestScanWithBloomError.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestScanWithBloomError.class);
private static final String TABLE_NAME = "ScanWithBloomError";
private static final String FAMILY = "myCF";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
index 2b36d79577..48a0726f99 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
@@ -31,8 +31,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -63,6 +61,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test of a long-lived scanner validating as we go.
@@ -73,7 +73,7 @@ public class TestScanner {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
- private static final Log LOG = LogFactory.getLog(TestScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestScanner.class);
private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
private static final byte [] FIRST_ROW = HConstants.EMPTY_START_ROW;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java
index dc32cb5c24..7429753e7f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java
@@ -27,7 +27,6 @@ import java.util.List;
import java.util.concurrent.Callable;
import org.apache.commons.lang3.exception.ExceptionUtils;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -37,7 +36,6 @@ import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
@@ -46,7 +44,6 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.ScannerCallable;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.filter.Filter;
@@ -56,7 +53,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.WAL;
-import org.apache.log4j.Level;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -122,8 +118,6 @@ public class TestScannerHeartbeatMessages {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
- ((Log4JLogger) ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
- ((Log4JLogger) HeartbeatRPCServices.LOG).getLogger().setLevel(Level.ALL);
Configuration conf = TEST_UTIL.getConfiguration();
conf.setStrings(HConstants.REGION_IMPL, HeartbeatHRegion.class.getName());
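Note: the two deleted setUpBeforeClass() lines cast the commons-logging Log to Log4JLogger in order to force Level.ALL; that cast no longer compiles once the LOG fields are SLF4J Loggers, so the patch drops the calls rather than porting them. If a test still needs to raise a level programmatically, one option is to address the backend logger by name; a sketch, assuming log4j 1.x remains on the test classpath as the SLF4J backend:

    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;

    public class LevelAdjustSketch {
      public static void main(String[] args) {
        // adjust the log4j 1.x backend directly instead of casting the SLF4J facade
        Logger.getLogger("org.apache.hadoop.hbase.client.ScannerCallable").setLevel(Level.ALL);
      }
    }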
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java
index bd63babdf6..a6aea6a3df 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -51,13 +49,15 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category({RegionServerTests.class, LargeTests.class})
public class TestScannerRetriableFailure {
- private static final Log LOG = LogFactory.getLog(TestScannerRetriableFailure.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestScannerRetriableFailure.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
index 50b1d62df4..287f65e6c0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
@@ -32,8 +32,6 @@ import java.util.Map;
import java.util.Random;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
@@ -56,6 +54,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test various seek optimizations for correctness and check if they are
@@ -65,8 +65,8 @@ import org.junit.runners.Parameterized.Parameters;
@Category({RegionServerTests.class, MediumTests.class})
public class TestSeekOptimizations {
- private static final Log LOG =
- LogFactory.getLog(TestSeekOptimizations.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestSeekOptimizations.class);
// Constants
private static final String FAMILY = "myCF";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java
index f78a19fb0f..613282f951 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java
@@ -28,10 +28,9 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.List;
+import java.util.Objects;
import java.util.concurrent.atomic.LongAdder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.ChoreService;
@@ -56,23 +55,20 @@ import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.ZooDefs.Ids;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RegionServerTests.class, MediumTests.class})
public class TestSplitLogWorker {
- private static final Log LOG = LogFactory.getLog(TestSplitLogWorker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSplitLogWorker.class);
private static final int WAIT_TIME = 15000;
private final ServerName MANAGER = ServerName.valueOf("manager,1,1");
- static {
- Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.DEBUG);
- }
private final static HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
private DummyServer ds;
@@ -430,7 +426,7 @@ public class TestSplitLogWorker {
waitForCounter(SplitLogCounters.tot_wkr_task_acquired_rescan, 0, 1, WAIT_TIME);
List nodes = ZKUtil.listChildrenNoWatch(zkw, zkw.znodePaths.splitLogZNode);
- LOG.debug(nodes);
+ LOG.debug(Objects.toString(nodes));
int num = 0;
for (String node : nodes) {
num++;
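Note: commons-logging's debug(Object) accepted any argument, so LOG.debug(nodes) compiled; SLF4J's debug(String) does not, which is why the hunk above wraps the list in Objects.toString (it also tolerates a null list). A sketch of that conversion next to the parameterized alternative SLF4J offers:

    import java.util.Arrays;
    import java.util.List;
    import java.util.Objects;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class DebugArgSketch {
      private static final Logger LOG = LoggerFactory.getLogger(DebugArgSketch.class);

      public static void main(String[] args) {
        List<String> nodes = Arrays.asList("task-1", "task-2");
        LOG.debug(Objects.toString(nodes));        // what the patch does: explicit, null-safe String conversion
        LOG.debug("split log znodes: {}", nodes);  // alternative: formatting is skipped when DEBUG is off
      }
    }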
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
index 619ffd081f..a86ec2f193 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
@@ -34,8 +34,6 @@ import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -101,7 +99,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -115,7 +114,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
@Category({RegionServerTests.class, LargeTests.class})
@SuppressWarnings("deprecation")
public class TestSplitTransactionOnCluster {
- private static final Log LOG = LogFactory.getLog(TestSplitTransactionOnCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSplitTransactionOnCluster.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
private Admin admin = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java
index 4db5734a84..073845e06e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java
@@ -27,8 +27,6 @@ import java.util.Collection;
import java.util.Map;
import org.apache.commons.lang3.mutable.MutableBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -56,6 +54,8 @@ import org.junit.experimental.categories.Category;
import org.mockito.Matchers;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Testcase for https://issues.apache.org/jira/browse/HBASE-13811
@@ -63,7 +63,7 @@ import org.mockito.stubbing.Answer;
@Category({ MediumTests.class })
public class TestSplitWalDataLoss {
- private static final Log LOG = LogFactory.getLog(TestSplitWalDataLoss.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSplitWalDataLoss.class);
private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
index f4758e7f49..1f5db50b35 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
@@ -37,8 +37,6 @@ import java.util.OptionalInt;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
@@ -63,11 +61,13 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// Can't be small as it plays with EnvironmentEdgeManager
@Category({RegionServerTests.class, MediumTests.class})
public class TestStoreScanner {
- private static final Log LOG = LogFactory.getLog(TestStoreScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestStoreScanner.class);
@Rule public TestName name = new TestName();
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
index 5278f3f7ac..62d22d201b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
@@ -26,8 +26,6 @@ import java.util.TreeMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -66,6 +64,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Testing for lock up of WAL subsystem.
@@ -73,7 +73,7 @@ import org.mockito.Mockito;
*/
@Category({MediumTests.class})
public class TestWALLockup {
- private static final Log LOG = LogFactory.getLog(TestWALLockup.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWALLockup.class);
@Rule public TestName name = new TestName();
private static final String COLUMN_FAMILY = "MyCF";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java
index b5378909b3..99db2087e8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java
@@ -26,8 +26,6 @@ import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -61,6 +59,8 @@ import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test for HBASE-17471.
@@ -76,7 +76,7 @@ import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
@Category({ RegionServerTests.class, SmallTests.class })
public class TestWALMonotonicallyIncreasingSeqId {
- private final Log LOG = LogFactory.getLog(getClass());
+ private final Logger LOG = LoggerFactory.getLogger(getClass());
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Path testDir = TEST_UTIL.getDataTestDir("TestWALMonotonicallyIncreasingSeqId");
private WALFactory wals;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
index 9514f9c564..380de9e92e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
@@ -26,8 +26,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestCase;
@@ -43,10 +41,12 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RegionServerTests.class, SmallTests.class})
public class TestWideScanner extends HBaseTestCase {
- private static final Log LOG = LogFactory.getLog(TestWideScanner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWideScanner.class);
static final byte[] A = Bytes.toBytes("A");
static final byte[] B = Bytes.toBytes("B");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java
index 3d5ed44e1c..30ab007f69 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java
@@ -25,8 +25,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.KeyValue;
@@ -41,11 +39,13 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ RegionServerTests.class, SmallTests.class })
public class TestCompactionScanQueryMatcher extends AbstractTestScanQueryMatcher {
- private static final Log LOG = LogFactory.getLog(TestCompactionScanQueryMatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCompactionScanQueryMatcher.class);
@Test
public void testMatch_PartialRangeDropDeletes() throws Exception {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
index 4cde038b20..aec93cbb3a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
@@ -24,8 +24,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.PrivateCellUtil;
@@ -38,11 +36,13 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ RegionServerTests.class, SmallTests.class })
public class TestUserScanQueryMatcher extends AbstractTestScanQueryMatcher {
- private static final Log LOG = LogFactory.getLog(TestUserScanQueryMatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestUserScanQueryMatcher.class);
/**
* This is a cryptic test. It is checking that we don't include a fake cell, one that has a
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java
index fe33d860ab..1d3834d870 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java
@@ -24,8 +24,6 @@ import java.io.IOException;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -52,11 +50,13 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ RegionServerTests.class, MediumTests.class })
public class TestCompactionWithThroughputController {
-
- private static final Log LOG = LogFactory.getLog(TestCompactionWithThroughputController.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestCompactionWithThroughputController.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java
index 3c1228e2f9..cb6155845c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java
@@ -18,8 +18,6 @@ import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -48,10 +46,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(MediumTests.class)
public class TestFlushWithThroughputController {
- private static final Log LOG = LogFactory.getLog(TestFlushWithThroughputController.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestFlushWithThroughputController.class);
private static final double EPSILON = 1E-6;
private HBaseTestingUtility hbtu;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
index 9481018ace..093a512a74 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
@@ -33,8 +33,6 @@ import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -74,10 +72,12 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public abstract class AbstractTestFSWAL {
- protected static final Log LOG = LogFactory.getLog(AbstractTestFSWAL.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(AbstractTestFSWAL.class);
protected static Configuration CONF;
protected static FileSystem FS;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java
index 85fcaff6b6..84a6216949 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertFalse;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -35,12 +33,14 @@ import org.apache.hadoop.hbase.wal.WAL;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests that verifies that the log is forced to be rolled every "hbase.regionserver.logroll.period"
*/
public abstract class AbstractTestLogRollPeriod {
- private static final Log LOG = LogFactory.getLog(AbstractTestLogRollPeriod.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractTestLogRollPeriod.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -110,7 +110,7 @@ public abstract class AbstractTestLogRollPeriod {
Thread.sleep(LOG_ROLL_PERIOD / 16);
}
} catch (Exception e) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
}
}
};
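Note: commons-logging's warn(Object) accepted a Throwable as the message itself, so LOG.warn(e) compiled; SLF4J has no such overload, so the patch rewrites it as LOG.warn(e.toString(), e), keeping the stack trace through the (String, Throwable) signature. The same rewrite appears for LOG.error(e) and LOG.info(e) in later hunks. A minimal sketch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ThrowableLoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ThrowableLoggingSketch.class);

      public static void main(String[] args) {
        try {
          throw new IllegalStateException("log roll interrupted");
        } catch (Exception e) {
          // message plus Throwable: the exception text stays in the message, the trace stays attached
          LOG.warn(e.toString(), e);
        }
      }
    }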
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
index 68d71b0958..a323db6ac4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -57,12 +55,14 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test log deletion as logs are rolled.
*/
public abstract class AbstractTestLogRolling {
- private static final Log LOG = LogFactory.getLog(AbstractTestLogRolling.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractTestLogRolling.class);
protected HRegionServer server;
protected String tableName;
protected byte[] value;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
index 60951aa068..ededcf3cfa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
@@ -44,8 +44,6 @@ import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -111,12 +109,14 @@ import org.junit.rules.TestName;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test replay of edits out of a WAL split.
*/
public abstract class AbstractTestWALReplay {
- private static final Log LOG = LogFactory.getLog(AbstractTestWALReplay.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractTestWALReplay.class);
static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final EnvironmentEdge ee = EnvironmentEdgeManager.getDelegate();
private Path hbaseRootDir = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
index 05addd2e44..cb45f49ce1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
@@ -144,7 +144,7 @@ public class TestFSHLog extends AbstractTestFSWAL {
try {
holdAppend.await();
} catch (InterruptedException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
}
@@ -170,7 +170,7 @@ public class TestFSHLog extends AbstractTestFSWAL {
region.put(new Put(b).addColumn(b, b,b));
putFinished.countDown();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
});
@@ -187,7 +187,7 @@ public class TestFSHLog extends AbstractTestFSWAL {
LOG.info("Flush succeeded:" + flushResult.isFlushSucceeded());
flushFinished.countDown();
} catch (IOException e) {
- LOG.error(e);
+ LOG.error(e.toString(), e);
}
}
});
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
index 665ceeb9a9..6c2fd9f4fd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.NavigableMap;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -39,6 +37,7 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -58,6 +57,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests for conditions that should trigger RegionServer aborts when
@@ -65,7 +66,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({RegionServerTests.class, MediumTests.class})
public class TestLogRollAbort {
- private static final Log LOG = LogFactory.getLog(AbstractTestLogRolling.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestLogRollAbort.class);
private static MiniDFSCluster dfsCluster;
private static Admin admin;
private static MiniHBaseCluster cluster;
@@ -164,7 +165,7 @@ public class TestLogRollAbort {
// not reliable now that sync plays a roll in wall rolling. The above puts also now call
// sync.
} catch (Throwable t) {
- LOG.fatal("FAILED TEST: Got wrong exception", t);
+ LOG.error(HBaseMarkers.FATAL, "FAILED TEST: Got wrong exception", t);
}
} finally {
table.close();
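Note: SLF4J defines no FATAL level, so LOG.fatal(msg, t) cannot be ported one-for-one; the patch logs at ERROR tagged with the HBaseMarkers.FATAL marker imported earlier in this file's hunk. A hedged stand-alone sketch using the stock MarkerFactory as an illustrative stand-in for HBaseMarkers (the real constant lives in org.apache.hadoop.hbase.log):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    public class FatalMarkerSketch {
      // illustrative stand-in for org.apache.hadoop.hbase.log.HBaseMarkers.FATAL
      private static final Marker FATAL = MarkerFactory.getMarker("FATAL");
      private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerSketch.class);

      public static void main(String[] args) {
        Throwable t = new AssertionError("wrong exception");
        // ERROR level plus a FATAL marker replaces the old commons-logging fatal() call
        LOG.error(FATAL, "FAILED TEST: Got wrong exception", t);
      }
    }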
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
index dd4ca097a8..12278eb3c0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
@@ -30,8 +30,6 @@ import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -48,7 +46,6 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -64,11 +61,13 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ VerySlowRegionServerTests.class, LargeTests.class })
public class TestLogRolling extends AbstractTestLogRolling {
- private static final Log LOG = LogFactory.getLog(TestLogRolling.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestLogRolling.class);
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
@@ -355,7 +354,7 @@ public class TestLogRolling extends AbstractTestLogRolling {
// a failed append could not be followed by a successful
// sync. What is coming out here is a failed sync, a sync
// that used to 'pass'.
- LOG.info(e);
+ LOG.info(e.toString(), e);
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java
index c990680b28..054910056a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java
@@ -22,8 +22,7 @@ import java.io.IOException;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.concurrent.ThreadLocalRandom;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -47,6 +46,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test many concurrent appenders to an WAL while rolling the log.
@@ -119,7 +120,7 @@ public class TestLogRollingNoCluster {
* Appender thread. Appends to passed wal file.
*/
static class Appender extends Thread {
- private final Log log;
+ private final Logger log;
private final WAL wal;
private final int count;
private Exception e = null;
@@ -128,7 +129,7 @@ public class TestLogRollingNoCluster {
super("" + index);
this.wal = wal;
this.count = count;
- this.log = LogFactory.getLog("Appender:" + getName());
+ this.log = LoggerFactory.getLogger("Appender:" + getName());
}
/**
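Note: LoggerFactory.getLogger also accepts a plain String, so the per-thread "Appender:<name>" logger in the hunk above migrates directly from LogFactory.getLog(String). A small sketch of a name-keyed logger on a thread:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class NamedLoggerSketch extends Thread {
      private final Logger log;

      NamedLoggerSketch(int index) {
        super("" + index);
        // SLF4J loggers can be keyed by an arbitrary name, not only a class
        this.log = LoggerFactory.getLogger("Appender:" + getName());
      }

      @Override
      public void run() {
        log.info("appender thread {} running", getName());
      }

      public static void main(String[] args) throws InterruptedException {
        NamedLoggerSketch t = new NamedLoggerSketch(0);
        t.start();
        t.join();
      }
    }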
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
index f44c2ea7b3..3a5ca0e81a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
@@ -31,8 +31,6 @@ import java.util.Optional;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -82,11 +80,13 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ReplicationTests.class, LargeTests.class})
public class TestMasterReplication {
- private static final Log LOG = LogFactory.getLog(TestReplicationBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMasterReplication.class);
private Configuration baseConfiguration;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
index 871cd190bc..9da0745f83 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
@@ -30,8 +30,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
@@ -53,11 +51,13 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ReplicationTests.class, LargeTests.class})
public class TestMultiSlaveReplication {
- private static final Log LOG = LogFactory.getLog(TestReplicationBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestMultiSlaveReplication.class);
private static Configuration conf1;
private static Configuration conf2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java
index 0d7a92d977..ed711233a6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java
@@ -30,8 +30,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -51,11 +49,13 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MediumTests.class})
public class TestNamespaceReplication extends TestReplicationBase {
- private static final Log LOG = LogFactory.getLog(TestNamespaceReplication.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestNamespaceReplication.class);
private static String ns1 = "ns1";
private static String ns2 = "ns2";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
index e9c352d058..bacda634e3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
@@ -25,8 +25,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -57,13 +55,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
@Category({FlakeyTests.class, LargeTests.class})
public class TestPerTableCFReplication {
- private static final Log LOG = LogFactory.getLog(TestPerTableCFReplication.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestPerTableCFReplication.class);
private static Configuration conf1;
private static Configuration conf2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
index f7d1009e6b..0592e2d701 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
@@ -23,8 +23,6 @@ import java.util.List;
import java.util.NavigableMap;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -45,6 +43,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class is only a base for other integration-level replication tests.
@@ -58,7 +58,7 @@ public class TestReplicationBase {
((Log4JLogger) ReplicationSource.LOG).getLogger().setLevel(Level.ALL);
}*/
- private static final Log LOG = LogFactory.getLog(TestReplicationBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationBase.class);
protected static Configuration conf1 = HBaseConfiguration.create();
protected static Configuration conf2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java
index 557ed99e40..abf8d261b6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
@@ -39,14 +37,16 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test handling of changes to the number of a peer's regionservers.
*/
@Category({ReplicationTests.class, LargeTests.class})
public class TestReplicationChangingPeerRegionservers extends TestReplicationBase {
-
- private static final Log LOG = LogFactory.getLog(TestReplicationChangingPeerRegionservers.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestReplicationChangingPeerRegionservers.class);
/**
* @throws java.lang.Exception
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
index c9579d607d..1675496aba 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.replication;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
@@ -28,14 +26,16 @@ import org.apache.hadoop.hbase.testclassification.ReplicationTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.fail;
@Category({ReplicationTests.class, LargeTests.class})
public class TestReplicationDisableInactivePeer extends TestReplicationBase {
-
- private static final Log LOG = LogFactory.getLog(TestReplicationDisableInactivePeer.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestReplicationDisableInactivePeer.class);
/**
* Test disabling an inactive peer. Add a peer which is inactive, trying to
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java
index df9cff25ec..044e55d7ef 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.replication;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.fail;
@@ -49,7 +49,7 @@ import java.util.List;
@Category(LargeTests.class)
public class TestReplicationDroppedTables extends TestReplicationBase {
- private static final Log LOG = LogFactory.getLog(TestReplicationDroppedTables.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationDroppedTables.class);
/**
* @throws java.lang.Exception
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
index b76ebb1a2c..c5f83419bf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
@@ -30,8 +30,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.Connection;
@@ -60,13 +58,15 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests ReplicationSource and ReplicationEndpoint interactions
*/
@Category({ReplicationTests.class, MediumTests.class})
public class TestReplicationEndpoint extends TestReplicationBase {
- private static final Log LOG = LogFactory.getLog(TestReplicationEndpoint.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationEndpoint.class);
static int numRegionServers;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
index 5739aee16a..30cd860290 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.replication;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.client.Result;
@@ -28,13 +26,15 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.ReplicationTests;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.fail;
@Category({ReplicationTests.class, LargeTests.class})
public class TestReplicationKillRS extends TestReplicationBase {
- private static final Log LOG = LogFactory.getLog(TestReplicationKillRS.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationKillRS.class);
/**
* Load up 1 tables over 2 region servers and kill a source during
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
index 122860532c..48d8924377 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
@@ -29,8 +29,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -61,13 +59,15 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ReplicationTests.class, MediumTests.class})
public class TestReplicationSource {
- private static final Log LOG =
- LogFactory.getLog(TestReplicationSource.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestReplicationSource.class);
private final static HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
private final static HBaseTestingUtility TEST_UTIL_PEER =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateBasic.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateBasic.java
index 15d15b3daf..29c093000e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateBasic.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateBasic.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.util.Pair;
@@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKConfig;
import org.apache.zookeeper.KeeperException;
import org.junit.Before;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* White box testing for replication state interfaces. Implementations should extend this class, and
@@ -60,7 +60,7 @@ public abstract class TestReplicationStateBasic {
protected static final int ZK_MAX_COUNT = 300;
protected static final int ZK_SLEEP_INTERVAL = 100; // millis
- private static final Log LOG = LogFactory.getLog(TestReplicationStateBasic.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationStateBasic.class);
@Before
public void setUp() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java
index 15fc78f3fd..231d655e2c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.ChoreService;
@@ -51,11 +49,13 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ReplicationTests.class, MediumTests.class})
public class TestReplicationStateZKImpl extends TestReplicationStateBasic {
- private static final Log LOG = LogFactory.getLog(TestReplicationStateZKImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationStateZKImpl.class);
private static Configuration conf;
private static HBaseTestingUtility utility;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java
index 7a2377fb9d..8532dff086 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.util.EnumSet;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
@@ -37,10 +35,12 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ReplicationTests.class, MediumTests.class})
public class TestReplicationStatus extends TestReplicationBase {
- private static final Log LOG = LogFactory.getLog(TestReplicationStatus.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationStatus.class);
private static final String PEER_ID = "2";
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
index edfff9a90e..0a602ada73 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.replication;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
@@ -47,7 +47,7 @@ import static org.junit.Assert.assertEquals;
@Category({ReplicationTests.class, LargeTests.class})
public class TestReplicationSyncUpTool extends TestReplicationBase {
- private static final Log LOG = LogFactory.getLog(TestReplicationSyncUpTool.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationSyncUpTool.class);
private static final TableName t1_su = TableName.valueOf("t1_syncup");
private static final TableName t2_su = TableName.valueOf("t2_syncup");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java
index c2481c4c11..a04d5247d8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java
@@ -27,8 +27,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.ChoreService;
@@ -52,6 +50,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class tests the ReplicationTrackerZKImpl class and ReplicationListener interface. One
@@ -63,7 +63,7 @@ import org.junit.experimental.categories.Category;
@Category({ReplicationTests.class, MediumTests.class})
public class TestReplicationTrackerZKImpl {
- private static final Log LOG = LogFactory.getLog(TestReplicationTrackerZKImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationTrackerZKImpl.class);
private static Configuration conf;
private static HBaseTestingUtility utility;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
index 924a72cd77..e3c1959ac4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
@@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
@@ -66,11 +64,13 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ReplicationTests.class, LargeTests.class})
public class TestReplicationWithTags {
- private static final Log LOG = LogFactory.getLog(TestReplicationWithTags.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationWithTags.class);
private static final byte TAG_TYPE = 1;
private static Configuration conf1 = HBaseConfiguration.create();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSerialReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSerialReplication.java
index 1c5aa7190e..3185a5be32 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSerialReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSerialReplication.java
@@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -61,10 +59,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ ReplicationTests.class, LargeTests.class })
public class TestSerialReplication {
- private static final Log LOG = LogFactory.getLog(TestSerialReplication.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSerialReplication.class);
private static Configuration conf1;
private static Configuration conf2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/master/TestTableCFsUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/master/TestTableCFsUpdater.java
index e78abfb74b..2993043b5b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/master/TestTableCFsUpdater.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/master/TestTableCFsUpdater.java
@@ -20,8 +20,6 @@
package org.apache.hadoop.hbase.replication.master;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -41,6 +39,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Map;
@@ -52,7 +52,7 @@ import static org.junit.Assert.assertTrue;
@Category({ReplicationTests.class, SmallTests.class})
public class TestTableCFsUpdater extends ReplicationPeerConfigUpgrader {
- private static final Log LOG = LogFactory.getLog(TestTableCFsUpdater.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTableCFsUpdater.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static ZKWatcher zkw = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestGlobalThrottler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestGlobalThrottler.java
index 9b1648f631..fab125b8bc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestGlobalThrottler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestGlobalThrottler.java
@@ -24,8 +24,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -56,10 +54,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ ReplicationTests.class, LargeTests.class })
public class TestGlobalThrottler {
- private static final Log LOG = LogFactory.getLog(TestGlobalThrottler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestGlobalThrottler.class);
private static Configuration conf1;
private static Configuration conf2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java
index 7e0f09024b..41272efbb7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java
@@ -26,9 +26,6 @@ import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -41,7 +38,6 @@ import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.RpcRetryingCallerImpl;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -56,7 +52,6 @@ import org.apache.hadoop.hbase.testclassification.FlakeyTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
-import org.apache.log4j.Level;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
@@ -65,6 +60,8 @@ import org.junit.experimental.categories.Category;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests RegionReplicaReplicationEndpoint class by setting up region replicas and verifying
@@ -72,12 +69,8 @@ import org.junit.rules.TestName;
*/
@Category({FlakeyTests.class, MediumTests.class})
public class TestRegionReplicaReplicationEndpoint {
-
- private static final Log LOG = LogFactory.getLog(TestRegionReplicaReplicationEndpoint.class);
-
- static {
- ((Log4JLogger) RpcRetryingCallerImpl.LOG).getLogger().setLevel(Level.ALL);
- }
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestRegionReplicaReplicationEndpoint.class);
private static final int NB_SERVERS = 2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
index dc0ca08422..039b667c40 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
@@ -33,8 +33,6 @@ import java.util.Random;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -71,10 +69,12 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ReplicationTests.class, MediumTests.class})
public class TestReplicationSink {
- private static final Log LOG = LogFactory.getLog(TestReplicationSink.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationSink.class);
private static final int BATCH_SIZE = 10;
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
index c4d079d892..307ea7f42d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
@@ -40,8 +40,7 @@ import java.util.TreeSet;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -94,7 +93,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -108,8 +108,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescr
@Category({ReplicationTests.class, MediumTests.class})
public abstract class TestReplicationSourceManager {
- protected static final Log LOG =
- LogFactory.getLog(TestReplicationSourceManager.class);
+ protected static final Logger LOG =
+ LoggerFactory.getLogger(TestReplicationSourceManager.class);
protected static Configuration conf;
@@ -274,7 +274,7 @@ public abstract class TestReplicationSourceManager {
if(i > 1 && i % 20 == 0) {
wal.rollWriter();
}
- LOG.info(i);
+ LOG.info(Long.toString(i));
final long txid = wal.append(
hri,
new WALKeyImpl(hri.getEncodedNameAsBytes(), test, System.currentTimeMillis(), mvcc, scopes),
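
Hunks like the one just above also adjust call sites, not only imports: commons-logging's info(Object) accepted any object, while the SLF4J Logger API only takes String messages. A minimal sketch of the two equivalent options under SLF4J (the class and method names here are illustrative, not from the patch):

// Minimal sketch (not part of the patch): commons-logging accepted any Object,
// but SLF4J's Logger only takes String messages, so numeric values must be
// converted explicitly or passed through a parameterized message.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class LoggingMigrationSketch {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingMigrationSketch.class);

  void logIteration(long i) {
    LOG.info(Long.toString(i));      // direct equivalent of the change above
    LOG.info("iteration {}", i);     // idiomatic SLF4J: lazy formatting, no explicit toString
  }
}
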
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java
index 8e2618c1bd..eb6b9cf287 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java
@@ -21,18 +21,18 @@ package org.apache.hadoop.hbase.replication.regionserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.ReplicationTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ReplicationTests.class, SmallTests.class})
public class TestReplicationThrottler {
- private static final Log LOG = LogFactory.getLog(TestReplicationThrottler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestReplicationThrottler.class);
/**
* unit test for throttling
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java
index 4a46074005..40b76d4c4d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java
@@ -24,8 +24,6 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -51,13 +49,15 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.Ignore;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(MediumTests.class)
@Ignore("Flaky, needs to be rewritten, see HBASE-19125")
public class TestReplicator extends TestReplicationBase {
- static final Log LOG = LogFactory.getLog(TestReplicator.class);
+ static final Logger LOG = LoggerFactory.getLogger(TestReplicator.class);
static final int NUM_ROWS = 10;
@BeforeClass
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
index 4d402b7310..655d9ba4f0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
@@ -17,13 +17,12 @@
*/
package org.apache.hadoop.hbase.security;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings;
import org.apache.hadoop.security.UserGroupInformation;
@@ -32,7 +31,7 @@ import java.net.InetAddress;
@InterfaceAudience.Private
public class HBaseKerberosUtils {
- private static final Log LOG = LogFactory.getLog(HBaseKerberosUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HBaseKerberosUtils.class);
public static final String KRB_PRINCIPAL = "hbase.regionserver.kerberos.principal";
public static final String MASTER_KRB_PRINCIPAL = "hbase.master.kerberos.principal";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
index 0af7c43190..5986e0b826 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
@@ -23,8 +23,6 @@ import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import org.apache.commons.lang3.SystemUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -33,14 +31,15 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
import static org.junit.Assert.*;
@Category({SecurityTests.class, SmallTests.class})
public class TestUser {
- private static final Log LOG = LogFactory.getLog(TestUser.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestUser.class);
@Test
public void testCreateUserForTestingGroupCache() throws Exception {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
index e477026bf3..cc01d62c88 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
@@ -30,8 +30,7 @@ import java.util.concurrent.CountDownLatch;
import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.ServiceException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -67,6 +66,8 @@ import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
@@ -76,7 +77,7 @@ import static org.junit.Assert.fail;
*/
public class SecureTestUtil {
- private static final Log LOG = LogFactory.getLog(SecureTestUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SecureTestUtil.class);
private static final int WAIT_TIME = 10000;
public static void configureSuperuser(Configuration conf) throws IOException {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index 67914658df..138a40e311 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -36,8 +36,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -128,14 +127,14 @@ import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.hbase.util.Threads;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Performs authorization checks for common operations, according to different
@@ -143,14 +142,7 @@ import org.junit.rules.TestName;
*/
@Category({SecurityTests.class, LargeTests.class})
public class TestAccessController extends SecureTestUtil {
- private static final Log LOG = LogFactory.getLog(TestAccessController.class);
-
- static {
- Logger.getLogger(AccessController.class).setLevel(Level.TRACE);
- Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE);
- Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE);
- }
-
+ private static final Logger LOG = LoggerFactory.getLogger(TestAccessController.class);
private static TableName TEST_TABLE = TableName.valueOf("testtable1");
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Configuration conf;
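
The static block deleted above relied on Log4J's programmatic Logger.getLogger(...).setLevel(...) API. SLF4J is a facade and exposes no level-setting call, so the per-class TRACE overrides are dropped; if that verbosity is still wanted it has to come from the logging backend's own configuration. A hypothetical Log4J 1.x properties fragment (an assumption for illustration, not part of this patch) would look like:

# Hypothetical log4j.properties fragment, shown only to illustrate where the
# removed TRACE overrides would move; the logger names mirror the classes above.
log4j.logger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
log4j.logger.org.apache.hadoop.hbase.security.access.AccessControlFilter=TRACE
log4j.logger.org.apache.hadoop.hbase.security.access.TableAuthManager=TRACE
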
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
index f9d0b1c219..7b7a150fef 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
@@ -26,8 +26,6 @@ import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -64,10 +62,12 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({SecurityTests.class, LargeTests.class})
public class TestAccessController2 extends SecureTestUtil {
- private static final Log LOG = LogFactory.getLog(TestAccessController2.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestAccessController2.class);
private static final byte[] TEST_ROW = Bytes.toBytes("test");
private static final byte[] TEST_FAMILY = Bytes.toBytes("f");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController3.java
index 11dbbd3469..babba153bf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController3.java
@@ -21,8 +21,6 @@ import static org.apache.hadoop.hbase.AuthUtil.toGroupEntry;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -47,14 +45,14 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Performs checks for reference counting w.r.t. TableAuthManager which is used by
@@ -65,14 +63,7 @@ import org.junit.rules.TestName;
*/
@Category({SecurityTests.class, MediumTests.class})
public class TestAccessController3 extends SecureTestUtil {
- private static final Log LOG = LogFactory.getLog(TestAccessController.class);
-
- static {
- Logger.getLogger(AccessController.class).setLevel(Level.TRACE);
- Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE);
- Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE);
- }
-
+ private static final Logger LOG = LoggerFactory.getLogger(TestAccessController.class);
private static TableName TEST_TABLE = TableName.valueOf("testtable1");
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
index 793f75b3fe..c9a1e6f138 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
@@ -25,8 +25,6 @@ import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Coprocessor;
@@ -34,6 +32,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.TestTableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -50,10 +49,7 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.TestTableName;
import org.apache.hadoop.hbase.util.Threads;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -61,16 +57,12 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({SecurityTests.class, MediumTests.class})
public class TestCellACLWithMultipleVersions extends SecureTestUtil {
- private static final Log LOG = LogFactory.getLog(TestCellACLWithMultipleVersions.class);
-
- static {
- Logger.getLogger(AccessController.class).setLevel(Level.TRACE);
- Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE);
- Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE);
- }
+ private static final Logger LOG = LoggerFactory.getLogger(TestCellACLWithMultipleVersions.class);
@Rule
public TestTableName TEST_TABLE = new TestTableName();
@@ -569,36 +561,36 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
new String[] { user2.getShortName(), AuthUtil.toGroupEntry(GROUP),
USER_OWNER.getShortName() }, Action.READ, Action.WRITE);
Put p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 123, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 123, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY2, TEST_Q1, (long) 123, ZERO);
- p.addColumn(TEST_FAMILY2, TEST_Q2, (long) 123, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q1, 123, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q2, 123, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY2, TEST_Q1, (long) 125, ZERO);
- p.addColumn(TEST_FAMILY2, TEST_Q2, (long) 125, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q1, 125, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q2, 125, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 127, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 127, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY2, TEST_Q1, (long) 129, ZERO);
- p.addColumn(TEST_FAMILY2, TEST_Q2, (long) 129, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q1, 129, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q2, 129, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
}
@@ -675,20 +667,20 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
new String[] { user2.getShortName(), AuthUtil.toGroupEntry(GROUP),
USER_OWNER.getShortName() }, Action.READ, Action.WRITE);
Put p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 123, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 123, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 127, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 127, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
}
@@ -767,20 +759,20 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
permsU2andGUandOwner.put(USER_OWNER.getShortName(), new Permission(Permission.Action.READ,
Permission.Action.WRITE));
Put p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 123, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 123, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 127, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 127, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
}
@@ -798,7 +790,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
Put p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 125, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 125, ZERO);
p.addColumn(TEST_FAMILY1, TEST_Q2, ZERO);
p.setACL(user2.getShortName(), new Permission(Permission.Action.READ,
Permission.Action.WRITE));
@@ -863,26 +855,26 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
AuthUtil.toGroupEntry(GROUP) }, Action.READ, Action.WRITE);
Put p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 120, ZERO);
- p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 120, ZERO);
- p.addColumn(TEST_FAMILY1, TEST_Q3, (long) 120, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 120, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 120, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q3, 120, ZERO);
p.setACL(permsU1andU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO);
- p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO);
- p.addColumn(TEST_FAMILY1, TEST_Q3, (long) 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q3, 123, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 127, ZERO);
p.setACL(permsU1_U2andGU);
t.put(p);
p = new Put(TEST_ROW1);
- p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 127, ZERO);
p.setACL(user2.getShortName(), new Permission(Permission.Action.READ));
t.put(p);
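
The (long) casts removed throughout this file were redundant rather than wrong: Put.addColumn(byte[], byte[], long, byte[]) declares the timestamp parameter as long, so an int literal widens to long implicitly. A minimal sketch (row, family, and qualifier names here are illustrative):

// Minimal sketch: int literals widen implicitly to the long timestamp parameter,
// so the explicit (long) casts removed above change nothing about behavior.
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

class TimestampWideningSketch {
  void example() {
    byte[] row = Bytes.toBytes("r");
    byte[] family = Bytes.toBytes("f");
    byte[] qualifier = Bytes.toBytes("q");
    byte[] value = new byte[0];
    Put p = new Put(row);
    p.addColumn(family, qualifier, 123, value);          // int literal widens to long
    p.addColumn(family, qualifier, (long) 123, value);   // equivalent; the cast is redundant
  }
}
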
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
index 3aa97b7497..319c75a5f4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
@@ -23,8 +23,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
@@ -34,6 +32,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.TestTableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -52,10 +51,7 @@ import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.TestTableName;
import org.apache.hadoop.hbase.util.Threads;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -63,18 +59,14 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category({SecurityTests.class, LargeTests.class})
public class TestCellACLs extends SecureTestUtil {
- private static final Log LOG = LogFactory.getLog(TestCellACLs.class);
-
- static {
- Logger.getLogger(AccessController.class).setLevel(Level.TRACE);
- Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE);
- Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE);
- }
+ private static final Logger LOG = LoggerFactory.getLogger(TestCellACLs.class);
@Rule
public TestTableName TEST_TABLE = new TestTableName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java
index ac13c8b515..a357c1fc4c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
@@ -50,13 +48,16 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Performs coprocessor loads for various paths and malformed strings
*/
@Category({SecurityTests.class, MediumTests.class})
public class TestCoprocessorWhitelistMasterObserver extends SecureTestUtil {
- private static final Log LOG = LogFactory.getLog(TestCoprocessorWhitelistMasterObserver.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestCoprocessorWhitelistMasterObserver.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final TableName TEST_TABLE = TableName.valueOf("testTable");
private static final byte[] TEST_FAMILY = Bytes.toBytes("fam1");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
index 2b0091d658..7db5fe00d4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
@@ -24,9 +24,8 @@ import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -53,14 +52,15 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
import com.google.protobuf.BlockingRpcChannel;
@Category({SecurityTests.class, MediumTests.class})
public class TestNamespaceCommands extends SecureTestUtil {
private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestNamespaceCommands.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestNamespaceCommands.class);
private static String TEST_NAMESPACE = "ns1";
private static String TEST_NAMESPACE2 = "ns2";
private static Configuration conf;
@@ -204,7 +204,7 @@ public class TestNamespaceCommands extends SecureTestUtil {
perms = AccessControlLists.getNamespacePermissions(conf, TEST_NAMESPACE);
for (Map.Entry entry : perms.entries()) {
- LOG.debug(entry);
+ LOG.debug(Objects.toString(entry));
}
assertEquals(6, perms.size());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
index f60209fff7..fcf989b67a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
@@ -17,12 +17,12 @@
*/
package org.apache.hadoop.hbase.security.access;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.TestTableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -44,9 +45,6 @@ import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.TestTableName;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -54,16 +52,12 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({SecurityTests.class, MediumTests.class})
public class TestScanEarlyTermination extends SecureTestUtil {
- private static final Log LOG = LogFactory.getLog(TestScanEarlyTermination.class);
-
- static {
- Logger.getLogger(AccessController.class).setLevel(Level.TRACE);
- Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE);
- Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE);
- }
+ private static final Logger LOG = LoggerFactory.getLogger(TestScanEarlyTermination.class);
@Rule
public TestTableName TEST_TABLE = new TestTableName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
index 607ea8c5f2..1270e8a374 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
@@ -29,8 +29,6 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.TableName;
@@ -50,7 +48,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
@@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
*/
@Category({SecurityTests.class, LargeTests.class})
public class TestTablePermissions {
- private static final Log LOG = LogFactory.getLog(TestTablePermissions.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTablePermissions.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static ZKWatcher ZKW;
private final static Abortable ABORTABLE = new Abortable() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
index 2fd3909344..58e7737518 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
@@ -17,26 +17,24 @@
*/
package org.apache.hadoop.hbase.security.access;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertEquals;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.TestTableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Connection;
@@ -62,18 +60,14 @@ import org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
-import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost;
-import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hbase.TestTableName;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.hadoop.hbase.wal.WALEdit;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -81,19 +75,14 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category({SecurityTests.class, LargeTests.class})
public class TestWithDisabledAuthorization extends SecureTestUtil {
- private static final Log LOG = LogFactory.getLog(TestWithDisabledAuthorization.class);
-
- static {
- Logger.getLogger(AccessController.class).setLevel(Level.TRACE);
- Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE);
- Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE);
- }
-
+ private static final Logger LOG = LoggerFactory.getLogger(TestWithDisabledAuthorization.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
index 18fb15fd52..1bb86f4f5e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java
@@ -24,8 +24,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.TableName;
@@ -39,13 +37,15 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the reading and writing of access permissions to and from zookeeper.
*/
@Category({SecurityTests.class, LargeTests.class})
public class TestZKPermissionWatcher {
- private static final Log LOG = LogFactory.getLog(TestZKPermissionWatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestZKPermissionWatcher.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static TableAuthManager AUTH_A;
private static TableAuthManager AUTH_B;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
index d61f98e3c6..feffdee42e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
@@ -32,8 +32,6 @@ import java.util.Collection;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.ChoreService;
@@ -57,6 +55,7 @@ import org.apache.hadoop.hbase.ipc.RpcServerFactory;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.ipc.SimpleRpcServer;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -64,9 +63,6 @@ import org.apache.hadoop.hbase.regionserver.OnlineRegions;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.security.SecurityInfo;
import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -92,6 +88,12 @@ import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
import com.google.protobuf.BlockingService;
import com.google.protobuf.RpcController;
@@ -114,7 +116,7 @@ public class TestTokenAuthentication {
System.setProperty("java.security.krb5.realm", "hbase");
System.setProperty("java.security.krb5.kdc", "blah");
}
- private static final Log LOG = LogFactory.getLog(TestTokenAuthentication.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestTokenAuthentication.class);
public interface AuthenticationServiceSecurityInfo {}
@@ -123,7 +125,7 @@ public class TestTokenAuthentication {
*/
private static class TokenServer extends TokenProvider implements
AuthenticationProtos.AuthenticationService.BlockingInterface, Runnable, Server {
- private static final Log LOG = LogFactory.getLog(TokenServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TokenServer.class);
private Configuration conf;
private HBaseTestingUtility TEST_UTIL;
private RpcServerInterface rpcServer;
@@ -252,7 +254,7 @@ public class TestTokenAuthentication {
@Override
public void abort(String reason, Throwable error) {
- LOG.fatal("Aborting on: "+reason, error);
+ LOG.error(HBaseMarkers.FATAL, "Aborting on: "+reason, error);
this.aborted = true;
this.stopped = true;
sleeper.skipSleepCycle();
@@ -339,6 +341,7 @@ public class TestTokenAuthentication {
started = true;
}
+ @Override
public void run() {
try {
initialize();
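
The abort() change in this file reflects that SLF4J has no fatal() level: former LOG.fatal(msg, t) calls become LOG.error() tagged with a Marker, here HBaseMarkers.FATAL as shown in the diff. A minimal sketch of the same pattern using a generic marker built with MarkerFactory (the class name and marker label are illustrative assumptions):

// Minimal sketch (not from the patch): SLF4J exposes no fatal() method, so a
// "fatal" event is logged at error level with a Marker carrying the severity.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

class FatalMarkerSketch {
  private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerSketch.class);
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

  void abort(String reason, Throwable error) {
    LOG.error(FATAL, "Aborting on: " + reason, error);  // marker + message + stack trace
  }
}
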
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java
index ea0733874d..1e5ea533c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java
@@ -26,8 +26,6 @@ import static org.junit.Assert.assertTrue;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -40,6 +38,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the synchronization of token authentication master keys through
@@ -47,7 +47,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({SecurityTests.class, LargeTests.class})
public class TestZKSecretWatcher {
- private static final Log LOG = LogFactory.getLog(TestZKSecretWatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestZKSecretWatcher.class);
private static HBaseTestingUtility TEST_UTIL;
private static AuthenticationTokenSecretManager KEY_MASTER;
private static AuthenticationTokenSecretManagerForTest KEY_SLAVE;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java
index 1757ddd0b1..391a844785 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.security.token;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -35,13 +33,15 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the refreshKeys in ZKSecretWatcher
*/
@Category({ SecurityTests.class, SmallTests.class })
public class TestZKSecretWatcherRefreshKeys {
- private static final Log LOG = LogFactory.getLog(TestZKSecretWatcherRefreshKeys.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestZKSecretWatcherRefreshKeys.class);
private static HBaseTestingUtility TEST_UTIL;
private static class MockAbortable implements Abortable {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
index 29ddfce402..9d60e1088a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
@@ -32,8 +32,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.AuthUtil;
@@ -65,6 +63,8 @@ import org.apache.hadoop.hbase.security.visibility.expression.Operator;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This is a VisibilityLabelService where labels in Mutation's visibility
@@ -74,8 +74,8 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelService {
-
- private static final Log LOG = LogFactory.getLog(ExpAsStringVisibilityLabelServiceImpl.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ExpAsStringVisibilityLabelServiceImpl.class);
private static final byte[] DUMMY_VALUE = new byte[0];
private static final byte STRING_SERIALIZATION_FORMAT = 2;
@@ -284,7 +284,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
authLabels = (authLabels == null) ? new ArrayList<>() : authLabels;
authorizations = new Authorizations(authLabels);
} catch (Throwable t) {
- LOG.error(t);
+ LOG.error(t.toString(), t);
throw new IOException(t);
}
}
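
Note on the hunk above: it is the first place in this patch where a bare LOG.error(t) has to change shape. commons-logging accepts any Object (including a Throwable) as the message, but SLF4J's Logger has no error(Throwable) or warn(Throwable) overload, so the migration passes the throwable's toString() as the message and the throwable itself as the second argument, which also keeps the stack trace. A minimal, self-contained sketch of the pattern (class and method names here are illustrative, not part of the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class ExceptionLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ExceptionLoggingSketch.class);

  void handle(Throwable t) {
    // Old commons-logging form was LOG.error(t); it compiled because the message
    // parameter is Object, but SLF4J offers no such overload.
    // SLF4J form used throughout this patch: message plus throwable, so the
    // full stack trace is still emitted.
    LOG.error(t.toString(), t);
  }
}
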
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
index 69040811bd..521cafe7ec 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
@@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
@@ -54,11 +52,13 @@ import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.junit.Before;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ SecurityTests.class, MediumTests.class })
public class TestVisibilityLabelReplicationWithExpAsString extends TestVisibilityLabelsReplication {
- private static final Log LOG = LogFactory
- .getLog(TestVisibilityLabelReplicationWithExpAsString.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TestVisibilityLabelReplicationWithExpAsString.class);
@Override
@Before
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
index dfa0e73997..58be8f9880 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
@@ -29,8 +29,6 @@ import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
@@ -81,10 +79,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ SecurityTests.class, MediumTests.class })
public class TestVisibilityLabelsReplication {
- private static final Log LOG = LogFactory.getLog(TestVisibilityLabelsReplication.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestVisibilityLabelsReplication.class);
protected static final int NON_VIS_TAG_TYPE = 100;
protected static final String TEMP = "temp";
protected static Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
index ab3440cdaa..189b37f935 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
@@ -29,8 +29,6 @@ import java.security.PrivilegedExceptionAction;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -53,12 +51,14 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.protobuf.ByteString;
@Category({SecurityTests.class, MediumTests.class})
public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibilityLabels {
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
TestVisibilityLabelsWithDefaultVisLabelService.class);
@BeforeClass
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
index de8fa23bf7..0a7d918fd4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.security.visibility;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
@@ -56,6 +54,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InterruptedIOException;
@@ -73,7 +73,7 @@ import static org.junit.Assert.assertTrue;
*/
@Category({SecurityTests.class, MediumTests.class})
public class TestVisibilityLabelsWithDeletes {
- private static final Log LOG = LogFactory.getLog(TestVisibilityLabelsWithDeletes.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestVisibilityLabelsWithDeletes.class);
private static final String TOPSECRET = "TOPSECRET";
private static final String PUBLIC = "PUBLIC";
private static final String PRIVATE = "PRIVATE";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java
index 3d53a1e637..43ba304102 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java
@@ -18,7 +18,9 @@
package org.apache.hadoop.hbase.security.visibility;
import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
@@ -54,7 +56,7 @@ import com.google.protobuf.ByteString;
public class TestWithDisabledAuthorization {
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-
+
private static final String CONFIDENTIAL = "confidential";
private static final String SECRET = "secret";
private static final String PRIVATE = "private";
@@ -63,7 +65,7 @@ public class TestWithDisabledAuthorization {
private static final byte[] ZERO = Bytes.toBytes(0L);
- @Rule
+ @Rule
public final TestName TEST_NAME = new TestName();
private static User SUPERUSER;
@@ -95,6 +97,7 @@ public class TestWithDisabledAuthorization {
// Define test labels
SUPERUSER.runAs(new PrivilegedExceptionAction() {
+ @Override
public Void run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
VisibilityClient.addLabels(conn,
@@ -103,7 +106,7 @@ public class TestWithDisabledAuthorization {
new String[] { SECRET, CONFIDENTIAL },
USER_RW.getShortName());
} catch (Throwable t) {
- fail("Should not have failed");
+ fail("Should not have failed");
}
return null;
}
@@ -120,13 +123,14 @@ public class TestWithDisabledAuthorization {
// Even though authorization is disabled, we should be able to manage user auths
SUPERUSER.runAs(new PrivilegedExceptionAction() {
+ @Override
public Void run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
VisibilityClient.setAuths(conn,
new String[] { SECRET, CONFIDENTIAL },
USER_RW.getShortName());
} catch (Throwable t) {
- fail("Should not have failed");
+ fail("Should not have failed");
}
return null;
}
@@ -134,6 +138,7 @@ public class TestWithDisabledAuthorization {
PrivilegedExceptionAction<List<String>> getAuths =
new PrivilegedExceptionAction<List<String>>() {
+ @Override
public List<String> run() throws Exception {
GetAuthsResponse authsResponse = null;
try (Connection conn = ConnectionFactory.createConnection(conf)) {
@@ -156,13 +161,14 @@ public class TestWithDisabledAuthorization {
assertTrue(authsList.contains(CONFIDENTIAL));
SUPERUSER.runAs(new PrivilegedExceptionAction() {
+ @Override
public Void run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
VisibilityClient.clearAuths(conn,
new String[] { SECRET },
USER_RW.getShortName());
} catch (Throwable t) {
- fail("Should not have failed");
+ fail("Should not have failed");
}
return null;
}
@@ -173,13 +179,14 @@ public class TestWithDisabledAuthorization {
assertTrue(authsList.contains(CONFIDENTIAL));
SUPERUSER.runAs(new PrivilegedExceptionAction() {
+ @Override
public Void run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
VisibilityClient.clearAuths(conn,
new String[] { CONFIDENTIAL },
USER_RW.getShortName());
} catch (Throwable t) {
- fail("Should not have failed");
+ fail("Should not have failed");
}
return null;
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
index 64a60c2a63..06de7b1d71 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
@@ -30,8 +30,6 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -72,7 +70,8 @@ import org.apache.hadoop.hbase.util.FSVisitor;
import org.apache.hadoop.hbase.util.MD5Hash;
import org.apache.yetus.audience.InterfaceAudience;
import org.junit.Assert;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
@@ -86,7 +85,7 @@ import com.google.protobuf.ServiceException;
*/
@InterfaceAudience.Private
public final class SnapshotTestingUtils {
- private static final Log LOG = LogFactory.getLog(SnapshotTestingUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SnapshotTestingUtils.class);
// default number of regions (and keys) given by getSplitKeys() and createTable()
private static byte[] KEYS = Bytes.toBytes("0123456");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
index 2882120120..a2c015c6b5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
@@ -29,8 +29,6 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -61,6 +59,8 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test creating/using/deleting snapshots from the client
@@ -72,7 +72,7 @@ import org.junit.rules.TestRule;
*/
@Category({RegionServerTests.class, LargeTests.class})
public class TestFlushSnapshotFromClient {
- private static final Log LOG = LogFactory.getLog(TestFlushSnapshotFromClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFlushSnapshotFromClient.class);
@ClassRule
public static final TestRule timeout =
CategoryBasedTimeout.forClass(TestFlushSnapshotFromClient.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobFlushSnapshotFromClient.java
index bba293745c..44c14aa0ae 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobFlushSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobFlushSnapshotFromClient.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.snapshot;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -33,6 +31,8 @@ import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test creating/using/deleting snapshots from the client
@@ -44,7 +44,7 @@ import org.junit.rules.TestRule;
*/
@Category({ClientTests.class, LargeTests.class})
public class TestMobFlushSnapshotFromClient extends TestFlushSnapshotFromClient {
- private static final Log LOG = LogFactory.getLog(TestFlushSnapshotFromClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFlushSnapshotFromClient.class);
@ClassRule
public static final TestRule timeout =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreFlushSnapshotFromClient.java
index 2a4ddde782..e86115065d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreFlushSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreFlushSnapshotFromClient.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.snapshot;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.BeforeClass;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test clone/restore snapshots from the client
@@ -39,7 +39,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({ClientTests.class,LargeTests.class})
public class TestMobRestoreFlushSnapshotFromClient extends TestRestoreFlushSnapshotFromClient {
- final Log LOG = LogFactory.getLog(getClass());
+ final Logger LOG = LoggerFactory.getLogger(getClass());
@BeforeClass
public static void setupCluster() throws Exception {
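
Note the instance-level logger in the Mob test subclasses above. As a side-by-side sketch (the names below are placeholders, not HBase classes), a static logger stays pinned to the declaring class, while getLogger(getClass()) resolves to the runtime class, so a subclass logs under its own name:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class BaseRestoreTestSketch {
  // Pinned to BaseRestoreTestSketch, even when used from a subclass.
  static final Logger STATIC_LOG = LoggerFactory.getLogger(BaseRestoreTestSketch.class);
  // Resolved at runtime, so a subclass instance logs under the subclass name.
  final Logger LOG = LoggerFactory.getLogger(getClass());
}

class MobRestoreTestSketch extends BaseRestoreTestSketch {
  void demo() {
    STATIC_LOG.info("attributed to BaseRestoreTestSketch");
    LOG.info("attributed to MobRestoreTestSketch");
  }
}
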
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreSnapshotHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreSnapshotHelper.java
index 9126d8d3b8..f4e9870dde 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreSnapshotHelper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreSnapshotHelper.java
@@ -19,20 +19,20 @@ package org.apache.hadoop.hbase.snapshot;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.snapshot.MobSnapshotTestingUtils.SnapshotMock;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the restore/clone operation from a file-system point of view.
*/
@Category(SmallTests.class)
public class TestMobRestoreSnapshotHelper extends TestRestoreSnapshotHelper {
- final Log LOG = LogFactory.getLog(getClass());
+ final Logger LOG = LoggerFactory.getLogger(getClass());
@Override
protected void setupConf(Configuration conf) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java
index df8fc64486..380beba331 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.snapshot;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -42,6 +40,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
@@ -61,7 +61,7 @@ import static org.mockito.Mockito.spy;
*/
@Category({MediumTests.class, RegionServerTests.class})
public class TestRegionSnapshotTask {
- private final Log LOG = LogFactory.getLog(getClass());
+ private final Logger LOG = LoggerFactory.getLogger(getClass());
private static HBaseTestingUtility TEST_UTIL;
private static Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
index 6bc33145bb..160f5099e0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.snapshot;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -39,6 +37,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test clone/restore snapshots from the client
@@ -48,7 +48,8 @@ import org.junit.experimental.categories.Category;
*/
@Category({RegionServerTests.class, LargeTests.class})
public class TestRestoreFlushSnapshotFromClient {
- private static final Log LOG = LogFactory.getLog(TestRestoreFlushSnapshotFromClient.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestRestoreFlushSnapshotFromClient.class);
protected final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
index b7110b2467..02bdae1a74 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -46,13 +44,15 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test the restore/clone operation from a file-system point of view.
*/
@Category({RegionServerTests.class, SmallTests.class})
public class TestRestoreSnapshotHelper {
- private static final Log LOG = LogFactory.getLog(TestRestoreSnapshotHelper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRestoreSnapshotHelper.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
protected final static String TEST_HFILE = "abc";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java
index 1c6920d5b5..bb492bad2f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java
@@ -24,8 +24,6 @@ import java.io.IOException;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.SnapshotDescription;
@@ -42,11 +40,13 @@ import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ MediumTests.class })
public class TestSnapshotClientRetries {
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestSnapshotClientRetries.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotClientRetries.class);
@Rule public TestTableName TEST_TABLE = new TestTableName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
index 038ee8cdb1..73e0560ef1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -37,6 +35,8 @@ import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test that the {@link SnapshotDescription} helper is helping correctly.
@@ -66,7 +66,7 @@ public class TestSnapshotDescriptionUtils {
EnvironmentEdgeManagerTestHelper.reset();
}
- private static final Log LOG = LogFactory.getLog(TestSnapshotDescriptionUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotDescriptionUtils.class);
@Test
public void testValidateMissingTableName() throws IOException {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
index 8ba4262ecc..2758c03cca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
@@ -21,8 +21,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -43,10 +41,12 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MasterTests.class, SmallTests.class})
public class TestSnapshotManifest {
- private final Log LOG = LogFactory.getLog(getClass());
+ private final Logger LOG = LoggerFactory.getLogger(getClass());
private static final String TABLE_NAME_STR = "testSnapshotManifest";
private static final TableName TABLE_NAME = TableName.valueOf(TABLE_NAME_STR);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
index bf43982f7d..7e1e264ea3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
@@ -32,11 +32,8 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.regex.Pattern;
import java.util.stream.IntStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -64,6 +61,7 @@ import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoad;
import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -78,6 +76,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
@@ -90,7 +90,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHF
*/
@Category({ MiscTests.class, LargeTests.class })
public class TestLoadIncrementalHFilesSplitRecovery {
- private static final Log LOG = LogFactory.getLog(TestHRegionServerBulkLoad.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHRegionServerBulkLoad.class);
static HBaseTestingUtility util;
// used by secure subclass
@@ -289,7 +289,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
errConn = getMockedConnection(util.getConfiguration());
serviceCallable = this.buildClientServiceCallable(errConn, table, first, lqis, true);
} catch (Exception e) {
- LOG.fatal("mocking cruft, should never happen", e);
+ LOG.error(HBaseMarkers.FATAL, "mocking cruft, should never happen", e);
throw new RuntimeException("mocking cruft, should never happen");
}
failedCalls.incrementAndGet();
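
This hunk shows the other recurring rewrite in the patch: SLF4J defines no FATAL level, so former LOG.fatal(...) calls go through error(...) with the HBaseMarkers.FATAL marker that the file now imports; marker-aware appenders can still single these entries out. A minimal sketch of the pattern, assuming HBase's HBaseMarkers class is on the classpath as the import in this file indicates (the class and method below are otherwise illustrative):

import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class FatalMarkerSketch {
  private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerSketch.class);

  void abort(String why, Throwable e) {
    // Equivalent of the old commons-logging LOG.fatal(why, e):
    // same ERROR-level record, tagged with the FATAL marker.
    LOG.error(HBaseMarkers.FATAL, why, e);
  }
}
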
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java
index d9bef119db..fbc0096a5c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java
@@ -34,8 +34,6 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -77,7 +75,8 @@ import org.apache.hadoop.hbase.util.HBaseFsck.TableInfo;
import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
import org.apache.zookeeper.KeeperException;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
@@ -92,7 +91,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
*/
public class BaseTestHBaseFsck {
static final int POOL_SIZE = 7;
- protected static final Log LOG = LogFactory.getLog(BaseTestHBaseFsck.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(BaseTestHBaseFsck.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
protected final static Configuration conf = TEST_UTIL.getConfiguration();
protected final static String FAM_STR = "fam";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
index ad9d217469..22a99a35df 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
@@ -26,8 +26,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -35,13 +33,15 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.Store;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test helper for testing archiving of HFiles
*/
public class HFileArchiveTestingUtil {
- private static final Log LOG = LogFactory.getLog(HFileArchiveTestingUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HFileArchiveTestingUtil.class);
private HFileArchiveTestingUtil() {
// NOOP private ctor since this is just a utility class
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java
index dade11e78b..551b940cf6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.util;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.ChoreService;
@@ -31,14 +29,17 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Basic mock Server for handler tests.
*/
public class MockServer implements Server {
- private static final Log LOG = LogFactory.getLog(MockServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MockServer.class);
final static ServerName NAME = ServerName.valueOf("MockServer", 123, -1);
boolean stopped;
@@ -46,7 +47,6 @@ public class MockServer implements Server {
final ZKWatcher zk;
final HBaseTestingUtility htu;
- @SuppressWarnings("unused")
public MockServer() throws ZooKeeperConnectionException, IOException {
// Shutdown default constructor by making it private.
this(null);
@@ -73,7 +73,7 @@ public class MockServer implements Server {
@Override
public void abort(String why, Throwable e) {
- LOG.fatal("Abort why=" + why, e);
+ LOG.error(HBaseMarkers.FATAL, "Abort why=" + why, e);
stop(why);
this.aborted = true;
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
index 6bfb23ea35..d1982194d9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
@@ -31,8 +31,6 @@ import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -45,13 +43,15 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Common base class for reader and writer parts of multi-thread HBase load
* test (See LoadTestTool).
*/
public abstract class MultiThreadedAction {
- private static final Log LOG = LogFactory.getLog(MultiThreadedAction.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedAction.class);
protected final TableName tableName;
protected final Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
index 69256b79b0..447cca870b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
@@ -23,23 +23,22 @@ import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.lang3.RandomUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Consistency;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/** Creates multiple threads that read and verify previously written data */
public class MultiThreadedReader extends MultiThreadedAction
{
- private static final Log LOG = LogFactory.getLog(MultiThreadedReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedReader.class);
protected Set readers = new HashSet<>();
private final double verifyPercent;
@@ -286,7 +285,7 @@ public class MultiThreadedReader extends MultiThreadedAction
+ ", time from start: "
+ (System.currentTimeMillis() - startTimeMs) + " ms");
if (printExceptionTrace) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
printExceptionTrace = false;
}
}
@@ -302,7 +301,7 @@ public class MultiThreadedReader extends MultiThreadedAction
+ (System.currentTimeMillis() - startTimeMs) + " ms");
}
if (printExceptionTrace) {
- LOG.warn(e);
+ LOG.warn(e.toString(), e);
printExceptionTrace = false;
}
}
@@ -379,7 +378,7 @@ public class MultiThreadedReader extends MultiThreadedAction
numKeysVerified.incrementAndGet();
}
} else {
- HRegionLocation hloc = ((ClusterConnection) connection).getRegionLocation(tableName,
+ HRegionLocation hloc = connection.getRegionLocation(tableName,
get.getRow(), false);
String rowKey = Bytes.toString(get.getRow());
LOG.info("Key = " + rowKey + ", Region location: " + hloc);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
index e951175372..86a8500bcb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
@@ -21,8 +21,6 @@ import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
@@ -32,12 +30,14 @@ import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A MultiThreadReader that helps to work with ACL
*/
public class MultiThreadedReaderWithACL extends MultiThreadedReader {
- private static final Log LOG = LogFactory.getLog(MultiThreadedReaderWithACL.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedReaderWithACL.class);
private static final String COMMA = ",";
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
index acc1c5ed10..768f961fd7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
@@ -30,8 +30,6 @@ import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.RandomUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -49,12 +47,13 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/** Creates multiple threads that write key/values into the */
public class MultiThreadedUpdater extends MultiThreadedWriterBase {
- private static final Log LOG = LogFactory.getLog(MultiThreadedUpdater.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedUpdater.class);
protected Set updaters = new HashSet<>();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
index 663e9875bb..c3258b466e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
@@ -24,8 +24,6 @@ import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
@@ -42,12 +40,14 @@ import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A MultiThreadUpdater that helps to work with ACL
*/
public class MultiThreadedUpdaterWithACL extends MultiThreadedUpdater {
- private static final Log LOG = LogFactory.getLog(MultiThreadedUpdaterWithACL.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedUpdaterWithACL.class);
private final static String COMMA= ",";
private User userOwner;
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
index 07e9cc8ae3..bcd24d57b7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
@@ -28,8 +28,6 @@ import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -38,10 +36,12 @@ import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/** Creates multiple threads that write key/values into the */
public class MultiThreadedWriter extends MultiThreadedWriterBase {
- private static final Log LOG = LogFactory.getLog(MultiThreadedWriter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedWriter.class);
protected Set writers = new HashSet<>();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
index fbf745ffb0..54be0d3f88 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
@@ -28,17 +28,17 @@ import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/** Creates multiple threads that write key/values into the */
public abstract class MultiThreadedWriterBase extends MultiThreadedAction {
- private static final Log LOG = LogFactory.getLog(MultiThreadedWriterBase.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedWriterBase.class);
/**
* A temporary place to keep track of inserted/updated keys. This is written to by
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
index 48062882f2..7d7f5971da 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
@@ -22,8 +22,6 @@ import java.io.PrintWriter;
import java.io.StringWriter;
import java.security.PrivilegedExceptionAction;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
@@ -32,13 +30,15 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* MultiThreadedWriter that helps in testing ACL
*/
public class MultiThreadedWriterWithACL extends MultiThreadedWriter {
- private static final Log LOG = LogFactory.getLog(MultiThreadedWriterWithACL.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedWriterWithACL.class);
private User userOwner;
public MultiThreadedWriterWithACL(LoadTestDataGenerator dataGen, Configuration conf,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
index a5cf0bd4a6..6415fdb62e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
@@ -39,8 +39,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -51,6 +49,8 @@ import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A helper class for process-based mini-cluster tests. Unlike
@@ -69,7 +69,7 @@ public class ProcessBasedLocalHBaseCluster {
private static final int MAX_FILE_SIZE_OVERRIDE = 10 * 1000 * 1000;
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
ProcessBasedLocalHBaseCluster.class);
private List daemonPidFiles =
@@ -461,7 +461,7 @@ public class ProcessBasedLocalHBaseCluster {
try {
runInternal();
} catch (IOException ex) {
- LOG.error(ex);
+ LOG.error(ex.toString(), ex);
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
index 7972855d0c..d78e34af11 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.util;
import java.io.IOException;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -33,6 +31,8 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A command-line tool that spins up a local process-based cluster, loads
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
*/
public class RestartMetaTest extends AbstractHBaseTool {
- private static final Log LOG = LogFactory.getLog(RestartMetaTest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RestartMetaTest.class);
/** The number of region servers used if not specified */
private static final int DEFAULT_NUM_RS = 2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
index 395c04d599..a90a47fc75 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.util;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -32,6 +30,8 @@ import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
@@ -41,7 +41,7 @@ import static org.junit.Assert.*;
@Category({MiscTests.class, SmallTests.class})
public class TestCompressionTest {
- private static final Log LOG = LogFactory.getLog(TestCompressionTest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCompressionTest.class);
@Test
public void testExceptionCaching() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
index 5899971838..8f503e034b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
@@ -21,8 +21,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -35,13 +33,15 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test our recoverLease loop against mocked up filesystem.
*/
@Category({MiscTests.class, MediumTests.class})
public class TestFSHDFSUtils {
- private static final Log LOG = LogFactory.getLog(TestFSHDFSUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFSHDFSUtils.class);
private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
static {
Configuration conf = HTU.getConfiguration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
index 30a7cd69a6..2c27a9313e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
@@ -32,8 +32,6 @@ import java.util.Map;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -53,6 +51,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests for {@link FSTableDescriptors}.
@@ -61,7 +61,7 @@ import org.junit.rules.TestName;
@Category({MiscTests.class, MediumTests.class})
public class TestFSTableDescriptors {
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestFSTableDescriptors.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFSTableDescriptors.class);
@Rule
public TestName name = new TestName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
index 055c28d38b..3d1f10f20d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
@@ -30,8 +30,6 @@ import java.io.IOException;
import java.util.Random;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -52,13 +50,15 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test {@link FSUtils}.
*/
@Category({MiscTests.class, MediumTests.class})
public class TestFSUtils {
- private static final Log LOG = LogFactory.getLog(TestFSUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFSUtils.class);
private HBaseTestingUtility htu;
private FileSystem fs;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
index e455b0ad27..cb23a0b350 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
@@ -25,8 +25,6 @@ import java.util.UUID;
import java.util.Set;
import java.util.HashSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -34,13 +32,15 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.junit.*;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test {@link FSUtils}.
*/
@Category({MiscTests.class, MediumTests.class})
public class TestFSVisitor {
- private static final Log LOG = LogFactory.getLog(TestFSVisitor.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFSVisitor.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
index c3f934dee6..769f8ef6db 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
@@ -31,18 +31,18 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, MediumTests.class})
// Medium as it creates 100 threads; seems better to run it isolated
public class TestIdLock {
- private static final Log LOG = LogFactory.getLog(TestIdLock.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestIdLock.class);
private static final int NUM_IDS = 16;
private static final int NUM_THREADS = 128;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java
index 7dd2a6322a..af89d357d8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java
@@ -35,8 +35,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType;
@@ -44,13 +42,15 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@RunWith(Parameterized.class)
@Category({MiscTests.class, MediumTests.class})
// Medium as it creates 100 threads; seems better to run it isolated
public class TestIdReadWriteLock {
- private static final Log LOG = LogFactory.getLog(TestIdReadWriteLock.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestIdReadWriteLock.class);
private static final int NUM_IDS = 16;
private static final int NUM_THREADS = 128;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java
index 1135039100..13c6df5d1e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java
@@ -34,17 +34,18 @@ import static org.junit.Assert.assertNull;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestJSONMetricUtil {
- private static final Log LOG = LogFactory.getLog(TestJSONMetricUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestJSONMetricUtil.class);
@Test
public void testBuildHashtable() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
index be4e850c05..adc7567f2d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
@@ -24,8 +24,6 @@ import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -47,6 +45,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A write/read/verify load test on a mini HBase cluster. Tests reading
@@ -56,7 +56,7 @@ import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
public class TestMiniClusterLoadSequential {
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
TestMiniClusterLoadSequential.class);
protected static final TableName TABLE =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java
index 68196a0240..7ab0dd38cf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.FileWriter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -41,6 +39,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests for Region Mover Load/Unload functionality with and without ack mode and also to test
@@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category;
@Category(MediumTests.class)
public class TestRegionMover {
- final Log LOG = LogFactory.getLog(getClass());
+ final Logger LOG = LoggerFactory.getLogger(getClass());
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@BeforeClass
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
index 02578ff264..88962f0174 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
@@ -28,8 +28,6 @@ import java.util.List;
import java.util.SortedSet;
import java.util.UUID;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
@@ -38,10 +36,12 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ComparisonChain;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MiscTests.class, SmallTests.class})
public class TestRegionSplitCalculator {
- private static final Log LOG = LogFactory.getLog(TestRegionSplitCalculator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRegionSplitCalculator.class);
/**
* This is range uses a user specified start and end keys. It also has an
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
index 9643443471..40564ee07f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
@@ -28,8 +28,6 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -49,6 +47,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests for {@link RegionSplitter}, which can create a pre-split table or do a
@@ -56,7 +56,7 @@ import org.junit.rules.TestName;
*/
@Category({MiscTests.class, MediumTests.class})
public class TestRegionSplitter {
- private final static Log LOG = LogFactory.getLog(TestRegionSplitter.class);
+ private final static Logger LOG = LoggerFactory.getLogger(TestRegionSplitter.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private final static String CF_NAME = "SPLIT_TEST_CF";
private final static byte xFF = (byte) 0xff;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java
index 1ecfa2b077..61d3aea43d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java
@@ -23,20 +23,20 @@ import junit.framework.TestCase;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test requirement that root directory must be a URI
*/
@Category({MiscTests.class, SmallTests.class})
public class TestRootPath extends TestCase {
- private static final Log LOG = LogFactory.getLog(TestRootPath.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestRootPath.class);
/** The test */
public void testRootPath() {
@@ -44,14 +44,14 @@ public class TestRootPath extends TestCase {
// Try good path
FSUtils.validateRootPath(new Path("file:///tmp/hbase/hbase"));
} catch (IOException e) {
- LOG.fatal("Unexpected exception checking valid path:", e);
+ LOG.error(HBaseMarkers.FATAL, "Unexpected exception checking valid path:", e);
fail();
}
try {
// Try good path
FSUtils.validateRootPath(new Path("hdfs://a:9000/hbase"));
} catch (IOException e) {
- LOG.fatal("Unexpected exception checking valid path:", e);
+ LOG.error(HBaseMarkers.FATAL, "Unexpected exception checking valid path:", e);
fail();
}
try {
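A note on the fatal-logging rewrite shown above: slf4j's Logger has no fatal() level, so the migration routes former LOG.fatal(...) calls through error() with a FATAL marker. Below is a minimal, self-contained sketch of that idiom; the class and marker holder are illustrative only (the real HBaseMarkers lives in org.apache.hadoop.hbase.log and its exact contents are assumed here), while the slf4j Marker/MarkerFactory API itself is standard.

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  import org.slf4j.Marker;
  import org.slf4j.MarkerFactory;

  public final class FatalMarkerExample {
    // Assumed shape of an HBaseMarkers-style holder: one shared "FATAL" marker instance.
    public static final Marker FATAL = MarkerFactory.getMarker("FATAL");

    private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerExample.class);

    public static void main(String[] args) {
      // Equivalent in intent to the old commons-logging LOG.fatal(msg, throwable):
      LOG.error(FATAL, "Unexpected exception checking valid path:", new Exception("demo"));
    }
  }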
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
index e6070ddd0f..1085ce4a3f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
@@ -23,8 +23,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -58,6 +56,8 @@ import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.Before;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This testing base class creates a minicluster and a testing table
@@ -74,8 +74,8 @@ import org.junit.experimental.categories.Category;
*/
@Category({MiscTests.class, LargeTests.class})
public class OfflineMetaRebuildTestCore {
- private final static Log LOG = LogFactory
- .getLog(OfflineMetaRebuildTestCore.class);
+ private final static Logger LOG = LoggerFactory
+ .getLogger(OfflineMetaRebuildTestCore.class);
protected HBaseTestingUtility TEST_UTIL;
protected Configuration conf;
private final static byte[] FAM = Bytes.toBytes("fam");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java
index 58ba047043..27eeb59408 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java
@@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.util.test;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.security.access.Permission;
@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.util.MultiThreadedAction.DefaultDataGenerator;
@InterfaceAudience.Private
public class LoadTestDataGeneratorWithACL extends DefaultDataGenerator {
- private static final Log LOG = LogFactory.getLog(LoadTestDataGeneratorWithACL.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LoadTestDataGeneratorWithACL.class);
private String[] userNames = null;
private static final String COMMA = ",";
private int specialPermCellInsertionFactor = 100;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java
index f578c11861..bfd18cf5f4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java
@@ -27,13 +27,13 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
// imports for things that haven't moved from regionserver.wal yet.
import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter;
@@ -70,7 +70,7 @@ import org.apache.hadoop.hbase.wal.WAL.Entry;
*/
@InterfaceAudience.Private
public class IOTestProvider implements WALProvider {
- private static final Log LOG = LogFactory.getLog(IOTestProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IOTestProvider.class);
private static final String ALLOWED_OPERATIONS = "hbase.wal.iotestprovider.operations";
private enum AllowedOperations {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
index 73725bb4e3..be65ba3b50 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
@@ -30,8 +30,6 @@ import java.util.HashSet;
import java.util.Random;
import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
@@ -51,11 +49,14 @@ import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@RunWith(Parameterized.class)
@Category({ RegionServerTests.class, LargeTests.class })
public class TestBoundedRegionGroupingStrategy {
- private static final Log LOG = LogFactory.getLog(TestBoundedRegionGroupingStrategy.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestBoundedRegionGroupingStrategy.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java
index d3d4d53e28..da2965e5ce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java
@@ -30,8 +30,6 @@ import java.util.Random;
import java.util.Set;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -58,10 +56,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({RegionServerTests.class, MediumTests.class})
public class TestFSHLogProvider {
- private static final Log LOG = LogFactory.getLog(TestFSHLogProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFSHLogProvider.class);
protected static Configuration conf;
protected static FileSystem fs;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
index ac53ae9554..0814fcb6c0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
@@ -27,8 +27,6 @@ import java.util.NavigableMap;
import java.util.TreeMap;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -50,7 +48,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.log4j.Level;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -67,10 +64,6 @@ import org.junit.runners.Parameterized.Parameters;
@Category({ RegionServerTests.class, MediumTests.class })
public class TestSecureWAL {
- static {
- ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hbase.regionserver.wal"))
- .getLogger().setLevel(Level.ALL);
- };
static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@Rule
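The static block deleted above used commons-logging's Log4JLogger wrapper to force the regionserver.wal package logger to Level.ALL. The slf4j facade deliberately exposes no setLevel(), so code that still needs to raise verbosity programmatically has to address the backend directly. A hedged sketch against the log4j 1.2 backend (the one slf4j-log4j12 binds to) follows; the class name is illustrative.

  import org.apache.log4j.Level;
  import org.apache.log4j.LogManager;

  public final class WalLogLevelExample {
    public static void main(String[] args) {
      // slf4j has no setLevel(); adjust the backing log4j 1.2 logger directly,
      // which is what the removed Log4JLogger static block did in effect.
      LogManager.getLogger("org.apache.hadoop.hbase.regionserver.wal").setLevel(Level.ALL);
    }
  }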
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
index 7c1af2539e..2c19c12965 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
@@ -30,8 +30,7 @@ import java.net.BindException;
import java.util.List;
import java.util.NavigableMap;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -71,13 +70,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* WAL tests that can be reused across providers.
*/
@Category({RegionServerTests.class, MediumTests.class})
public class TestWALFactory {
- private static final Log LOG = LogFactory.getLog(TestWALFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWALFactory.class);
protected static Configuration conf;
private static MiniDFSCluster cluster;
@@ -402,7 +403,7 @@ public class TestWALFactory {
// Stop the cluster. (ensure restart since we're sharing MiniDFSCluster)
try {
- DistributedFileSystem dfs = (DistributedFileSystem) cluster.getFileSystem();
+ DistributedFileSystem dfs = cluster.getFileSystem();
dfs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_ENTER);
TEST_UTIL.shutdownMiniDFSCluster();
try {
@@ -410,7 +411,7 @@ public class TestWALFactory {
// but still call this since it closes the LogSyncer thread first
wal.shutdown();
} catch (IOException e) {
- LOG.info(e);
+ LOG.info(e.toString(), e);
}
fs.close(); // closing FS last so DFSOutputStream can't call close
LOG.info("STOPPED first instance of the cluster");
@@ -445,7 +446,7 @@ public class TestWALFactory {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
- LOG.info(e);
+ LOG.info(e.toString(), e);
}
// Now try recovering the log, like the HMaster would do
@@ -454,6 +455,7 @@ public class TestWALFactory {
class RecoverLogThread extends Thread {
public Exception exception = null;
+ @Override
public void run() {
try {
FSUtils.getInstance(fs, rlConf)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
index 32253106cc..3672f9b146 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
@@ -27,8 +27,6 @@ import java.util.NavigableMap;
import java.util.TreeMap;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -58,7 +56,6 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
-import org.apache.log4j.Level;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
@@ -70,10 +67,6 @@ import org.junit.rules.TestName;
*/
@Category({RegionServerTests.class, MediumTests.class})
public class TestWALReaderOnSecureWAL {
- static {
- ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hbase.regionserver.wal"))
- .getLogger().setLevel(Level.ALL);
- };
static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
final byte[] value = Bytes.toBytes("Test value");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java
index cd8bbe470f..c71fb4b36a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hbase.wal;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -39,6 +37,8 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
@@ -49,7 +49,7 @@ import static org.junit.Assert.assertEquals;
@Category(MediumTests.class)
public class TestWALRootDir {
- private static final Log LOG = LogFactory.getLog(TestWALRootDir.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestWALRootDir.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Configuration conf;
private static FileSystem fs;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
index 0fc0df1e64..ad3c2b50c7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
@@ -39,14 +39,13 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
+import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -96,6 +95,8 @@ import org.junit.rules.TestName;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Testing {@link WAL} splitting code.
@@ -109,7 +110,7 @@ public class TestWALSplit {
//((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL);
//((Log4JLogger)FSNamesystem.LOG).getLogger().setLevel(Level.ALL);
}
- private final static Log LOG = LogFactory.getLog(TestWALSplit.class);
+ private final static Logger LOG = LoggerFactory.getLogger(TestWALSplit.class);
private static Configuration conf;
private FileSystem fs;
@@ -240,7 +241,7 @@ public class TestWALSplit {
for (FileStatus status : fs.listStatus(WALDIR)) {
ls.append("\t").append(status.toString()).append("\n");
}
- LOG.debug(ls);
+ LOG.debug(Objects.toString(ls));
LOG.info("Splitting WALs out from under zombie. Expecting " + numWriters + " files.");
WALSplitter.split(HBASEDIR, WALDIR, OLDLOGDIR, fs, conf2, wals);
LOG.info("Finished splitting out from under zombie.");
@@ -820,6 +821,7 @@ public class TestWALSplit {
someOldThread.setDaemon(true);
someOldThread.start();
final Thread t = new Thread("Background-thread-dumper") {
+ @Override
public void run() {
try {
Threads.threadDumpingIsAlive(someOldThread);
@@ -888,6 +890,7 @@ public class TestWALSplit {
"Blocklist for " + OLDLOGDIR + " has changed"};
private int count = 0;
+ @Override
public FSDataInputStream answer(InvocationOnMock invocation) throws Throwable {
if (count < 3) {
throw new IOException(errors[count++]);
@@ -917,6 +920,7 @@ public class TestWALSplit {
FileSystem spiedFs = Mockito.spy(fs);
Mockito.doAnswer(new Answer() {
+ @Override
public FSDataInputStream answer(InvocationOnMock invocation) throws Throwable {
Thread.sleep(1500); // Sleep a while and wait report status invoked
return (FSDataInputStream)invocation.callRealMethod();
@@ -1035,7 +1039,7 @@ public class TestWALSplit {
byte region[] = new byte[] {(byte)'r', (byte) (0x30 + regionIdx)};
Entry ret = createTestEntry(TABLE_NAME, region,
- Bytes.toBytes((int)(index / regions.size())),
+ Bytes.toBytes(index / regions.size()),
FAMILY, QUALIFIER, VALUE, index);
index++;
return ret;
@@ -1157,7 +1161,7 @@ public class TestWALSplit {
try{
logSplitter.splitLogFile(logfiles[0], null);
} catch (IOException e) {
- LOG.info(e);
+ LOG.info(e.toString(), e);
fail("Throws IOException when spliting "
+ "log, it is most likely because writing file does not "
+ "exist which is caused by concurrent replayRecoveredEditsIfAny()");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
index de23d61e88..93e6373545 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
@@ -31,8 +31,6 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
@@ -70,6 +68,8 @@ import org.apache.htrace.core.Sampler;
import org.apache.htrace.core.TraceScope;
import org.apache.htrace.core.Tracer;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.Histogram;
@@ -86,7 +86,9 @@ import com.codahale.metrics.MetricRegistry;
*/
@InterfaceAudience.Private
public final class WALPerformanceEvaluation extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(WALPerformanceEvaluation.class.getName());
+ private static final Logger LOG =
+ LoggerFactory.getLogger(WALPerformanceEvaluation.class);
+
private final MetricRegistry metrics = new MetricRegistry();
private final Meter syncMeter =
metrics.meter(name(WALPerformanceEvaluation.class, "syncMeter", "syncs"));
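One detail of the change above: LogFactory.getLog(...) was being handed the class name string, while the replacement passes the Class itself. LoggerFactory.getLogger offers both overloads, and the Class form names the logger by Class.getName(), so the two are interchangeable; a quick sketch (class name illustrative):

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public final class LoggerNamingExample {
    public static void main(String[] args) {
      // getLogger has both Class and String overloads; the Class form uses Class.getName().
      Logger byClass = LoggerFactory.getLogger(LoggerNamingExample.class);
      Logger byName = LoggerFactory.getLogger(LoggerNamingExample.class.getName());
      System.out.println(byClass.getName().equals(byName.getName())); // prints true
    }
  }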
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
index 346b8fbff0..4e67b9171d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
@@ -28,8 +28,6 @@ import java.util.List;
import javax.security.auth.login.AppConfigurationEntry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -46,10 +44,12 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ ZKTests.class, MediumTests.class })
public class TestZooKeeperACL {
- private final static Log LOG = LogFactory.getLog(TestZooKeeperACL.class);
+ private final static Logger LOG = LoggerFactory.getLogger(TestZooKeeperACL.class);
private final static HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml
index 814c667be4..856d6c3c12 100644
--- a/hbase-shaded/pom.xml
+++ b/hbase-shaded/pom.xml
@@ -59,7 +59,6 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
- <version>${slf4j.version}</version>
<optional>true</optional>
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index 0c8e0eb5c2..15b4b6a8b5 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -212,8 +212,8 @@
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
<groupId>org.jruby</groupId>
diff --git a/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java b/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java
index 26fa81dc73..70b215c452 100644
--- a/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java
+++ b/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.client.rsgroup;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -35,6 +33,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
@@ -44,7 +44,7 @@ import java.util.List;
//Since we need to use a different balancer and run more than 1 RS
@Category({ClientTests.class, LargeTests.class})
public class TestShellRSGroups {
- final Log LOG = LogFactory.getLog(getClass());
+ final Logger LOG = LoggerFactory.getLogger(getClass());
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static ScriptingContainer jruby = new ScriptingContainer();
private static String basePath;
diff --git a/hbase-spark-it/pom.xml b/hbase-spark-it/pom.xml
index 3910dc8541..ff6581fb07 100644
--- a/hbase-spark-it/pom.xml
+++ b/hbase-spark-it/pom.xml
@@ -239,8 +239,8 @@
<artifactId>hbase-testing-util</artifactId>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
<groupId>commons-cli</groupId>
diff --git a/hbase-spark-it/src/test/java/org/apache/hadoop/hbase/spark/IntegrationTestSparkBulkLoad.java b/hbase-spark-it/src/test/java/org/apache/hadoop/hbase/spark/IntegrationTestSparkBulkLoad.java
index 89eb087e66..4e4f7f61c4 100644
--- a/hbase-spark-it/src/test/java/org/apache/hadoop/hbase/spark/IntegrationTestSparkBulkLoad.java
+++ b/hbase-spark-it/src/test/java/org/apache/hadoop/hbase/spark/IntegrationTestSparkBulkLoad.java
@@ -33,8 +33,6 @@ import java.util.Set;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -76,6 +74,8 @@ import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import scala.Tuple2;
@@ -93,7 +93,7 @@ import scala.Tuple2;
*/
public class IntegrationTestSparkBulkLoad extends IntegrationTestBase {
- private static final Log LOG = LogFactory.getLog(IntegrationTestSparkBulkLoad.class);
+ private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestSparkBulkLoad.class);
// The number of partitions for random generated data
private static String BULKLOAD_PARTITIONS_NUM = "hbase.spark.bulkload.partitionsnum";
diff --git a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java
index a94c59c4c0..6643169aec 100644
--- a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java
+++ b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java
@@ -17,10 +17,10 @@
package org.apache.hadoop.hbase.spark;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.spark.datasources.BytesEncoder;
@@ -46,7 +46,7 @@ import com.google.protobuf.ByteString;
*/
@InterfaceAudience.Private
public class SparkSQLPushDownFilter extends FilterBase{
- protected static final Log log = LogFactory.getLog(SparkSQLPushDownFilter.class);
+ protected static final Logger log = LoggerFactory.getLogger(SparkSQLPushDownFilter.class);
//The following values are populated with protobuffer
DynamicLogicExpression dynamicLogicExpression;
diff --git a/hbase-spark/src/test/java/org/apache/hadoop/hbase/spark/TestJavaHBaseContext.java b/hbase-spark/src/test/java/org/apache/hadoop/hbase/spark/TestJavaHBaseContext.java
index e383b5e0bf..f9a813dc29 100644
--- a/hbase-spark/src/test/java/org/apache/hadoop/hbase/spark/TestJavaHBaseContext.java
+++ b/hbase-spark/src/test/java/org/apache/hadoop/hbase/spark/TestJavaHBaseContext.java
@@ -24,8 +24,6 @@ import java.util.Iterator;
import java.util.HashMap;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -59,6 +57,8 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import scala.Tuple2;
import org.apache.hadoop.hbase.shaded.com.google.common.io.Files;
@@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.io.Files;
public class TestJavaHBaseContext implements Serializable {
private transient JavaSparkContext jsc;
HBaseTestingUtility htu;
- protected static final Log LOG = LogFactory.getLog(TestJavaHBaseContext.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(TestJavaHBaseContext.class);
@@ -189,6 +189,7 @@ public class TestJavaHBaseContext implements Serializable {
private static final long serialVersionUID = 1L;
+ @Override
public Put call(String v) throws Exception {
String[] cells = v.split(",");
Put put = new Put(Bytes.toBytes(cells[0]));
@@ -464,6 +465,7 @@ public class TestJavaHBaseContext implements Serializable {
private static final long serialVersionUID = 1L;
+ @Override
public Get call(byte[] v) throws Exception {
return new Get(v);
}
@@ -473,6 +475,7 @@ public class TestJavaHBaseContext implements Serializable {
private static final long serialVersionUID = 1L;
+ @Override
public String call(Result result) throws Exception {
Iterator it = result.listCells().iterator();
StringBuilder b = new StringBuilder();
diff --git a/hbase-testing-util/pom.xml b/hbase-testing-util/pom.xml
index 2841459a33..d6e28a7ee2 100644
--- a/hbase-testing-util/pom.xml
+++ b/hbase-testing-util/pom.xml
@@ -129,7 +129,6 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
- <version>${slf4j.version}</version>
diff --git a/hbase-thrift/pom.xml b/hbase-thrift/pom.xml
index 2b15f7ca29..15a7a0cf0c 100644
--- a/hbase-thrift/pom.xml
+++ b/hbase-thrift/pom.xml
@@ -210,8 +210,8 @@
<artifactId>hbase-shaded-miscellaneous</artifactId>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
<groupId>commons-cli</groupId>
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java
index da09825c1a..82c9b5d607 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java
@@ -25,9 +25,9 @@ import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A BlockingQueue reports waiting time in queue and queue length to
@@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class CallQueue implements BlockingQueue {
- private static final Log LOG = LogFactory.getLog(CallQueue.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CallQueue.class);
private final BlockingQueue underlyingQueue;
private final ThriftMetrics metrics;
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java
index 58146ca164..ad1384c605 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java
@@ -21,19 +21,19 @@ package org.apache.hadoop.hbase.thrift;
import java.util.Locale;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.thrift.server.TThreadedSelectorServer;
import org.apache.thrift.transport.TNonblockingServerTransport;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A TThreadedSelectorServer.Args that reads hadoop configuration
*/
@InterfaceAudience.Private
public class HThreadedSelectorServerArgs extends TThreadedSelectorServer.Args {
- private static final Log LOG = LogFactory.getLog(TThreadedSelectorServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TThreadedSelectorServer.class);
/**
* Number of selector threads for reading and writing socket
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java
index 2fc6546692..46e394394a 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java
@@ -23,11 +23,11 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Converts a Hbase.Iface using InvocationHandler so that it reports process
@@ -36,7 +36,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class HbaseHandlerMetricsProxy implements InvocationHandler {
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
HbaseHandlerMetricsProxy.class);
private final Hbase.Iface handler;
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
index 3f0530a338..60a8b7faac 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
@@ -32,8 +32,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.LongAdder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler;
@@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class will coalesce increments from a thrift server if
@@ -161,7 +161,7 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
private int maxQueueSize = 500000;
private static final int CORE_POOL_SIZE = 1;
- private static final Log LOG = LogFactory.getLog(FullyQualifiedRow.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FullyQualifiedRow.class);
@SuppressWarnings("deprecation")
public IncrementCoalescer(HBaseHandler hand) {
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java
index 56736ca552..d4cb8d62b3 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java
@@ -25,8 +25,6 @@ import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.thrift.TException;
@@ -39,7 +37,8 @@ import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
@@ -101,7 +100,7 @@ public class TBoundedThreadPoolServer extends TServer {
*/
public static final int TIME_TO_WAIT_AFTER_SHUTDOWN_MS = 5000;
- private static final Log LOG = LogFactory.getLog(
+ private static final Logger LOG = LoggerFactory.getLogger(
TBoundedThreadPoolServer.class.getName());
private final CallQueue callQueue;
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java
index 784f9dca6f..4f55a01a08 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java
@@ -25,8 +25,6 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.SecurityUtil;
import org.apache.hadoop.hbase.util.Base64;
@@ -43,6 +41,8 @@ import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Thrift Http Servlet is used for performing Kerberos authentication if security is enabled and
@@ -51,7 +51,7 @@ import org.ietf.jgss.Oid;
@InterfaceAudience.Private
public class ThriftHttpServlet extends TServlet {
private static final long serialVersionUID = 1L;
- private static final Log LOG = LogFactory.getLog(ThriftHttpServlet.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(ThriftHttpServlet.class.getName());
private transient final UserGroupInformation realUser;
private transient final Configuration conf;
private final boolean securityEnabled;
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
index 8bf6409a95..cd1993de6d 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
@@ -26,8 +26,6 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* ThriftServer - this class starts up a Thrift server which implements the
@@ -45,7 +45,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class ThriftServer {
- private static final Log LOG = LogFactory.getLog(ThriftServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ThriftServer.class);
private static final String MIN_WORKERS_OPTION = "minWorkers";
private static final String MAX_WORKERS_OPTION = "workers";
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 27850ef021..ba278474c7 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -48,8 +48,6 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilder;
@@ -84,6 +82,7 @@ import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.security.SaslUtil;
import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
import org.apache.hadoop.hbase.security.SecurityUtil;
@@ -139,7 +138,8 @@ import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -151,7 +151,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
@InterfaceAudience.Private
public class ThriftServerRunner implements Runnable {
- private static final Log LOG = LogFactory.getLog(ThriftServerRunner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ThriftServerRunner.class);
private static final int DEFAULT_HTTP_MAX_HEADER_SIZE = 64 * 1024; // 64k
@@ -386,7 +386,7 @@ public class ThriftServerRunner implements Runnable {
tserver.serve();
}
} catch (Exception e) {
- LOG.fatal("Cannot run ThriftServer", e);
+ LOG.error(HBaseMarkers.FATAL, "Cannot run ThriftServer", e);
// Crash the process if the ThriftServer is not running
System.exit(-1);
}
@@ -714,7 +714,7 @@ public class ThriftServerRunner implements Runnable {
*/
public static class HBaseHandler implements Hbase.Iface {
protected Configuration conf;
- protected static final Log LOG = LogFactory.getLog(HBaseHandler.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(HBaseHandler.class);
// nextScannerId and scannerMap are used to manage scanner state
protected int nextScannerId = 0;
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
index be74f0c451..3773fc661e 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
@@ -46,8 +46,6 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HRegionLocation;
@@ -73,6 +71,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ConnectionCache;
import org.apache.thrift.TException;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This class is a glue object that connects Thrift RPC calls to the HBase client API primarily
@@ -83,7 +83,7 @@ import org.apache.yetus.audience.InterfaceAudience;
public class ThriftHBaseServiceHandler implements THBaseService.Iface {
// TODO: Size of pool configurable
- private static final Log LOG = LogFactory.getLog(ThriftHBaseServiceHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ThriftHBaseServiceHandler.class);
// nextScannerId and scannerMap are used to manage scanner state
// TODO: Cleanup thread for Scanners, Scanner id wrap
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
index 4f75dbcdad..82bce15644 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
@@ -44,8 +44,6 @@ import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -86,7 +84,8 @@ import org.apache.thrift.transport.TServerTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
import org.apache.yetus.audience.InterfaceAudience;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
@@ -96,7 +95,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
@SuppressWarnings({ "rawtypes", "unchecked" })
public class ThriftServer extends Configured implements Tool {
- private static final Log log = LogFactory.getLog(ThriftServer.class);
+ private static final Logger log = LoggerFactory.getLogger(ThriftServer.class);
/**
* Thrift quality of protection configuration key. Valid values can be:
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java
index b5d0f04ea1..a13774db07 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.LinkedBlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -35,6 +33,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Unit testing for CallQueue, a part of the
@@ -44,7 +44,7 @@ import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
public class TestCallQueue {
- private static final Log LOG = LogFactory.getLog(TestCallQueue.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCallQueue.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final MetricsAssertHelper metricsHelper =
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
index c5fff4ee2c..aeb24f79b2 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertFalse;
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -43,7 +41,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
/**
@@ -54,8 +53,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
public class TestThriftHttpServer {
- private static final Log LOG =
- LogFactory.getLog(TestThriftHttpServer.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestThriftHttpServer.class);
private static final HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
index d19e336269..5108eb3d11 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
@@ -32,8 +32,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompatibilityFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -69,6 +67,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Unit testing for ThriftServerRunner.HBaseHandler, a part of the
@@ -77,7 +77,7 @@ import org.junit.rules.TestName;
@Category({ClientTests.class, LargeTests.class})
public class TestThriftServer {
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
- private static final Log LOG = LogFactory.getLog(TestThriftServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestThriftServer.class);
private static final MetricsAssertHelper metricsHelper = CompatibilityFactory
.getInstance(MetricsAssertHelper.class);
protected static final int MAXVERSIONS = 3;
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java
index 4d52576cce..818b162c47 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java
@@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -50,7 +48,8 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
/**
@@ -61,8 +60,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
@RunWith(Parameterized.class)
public class TestThriftServerCmdLine {
- private static final Log LOG =
- LogFactory.getLog(TestThriftServerCmdLine.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestThriftServerCmdLine.class);
private final ImplType implType;
private boolean specifyFramed;
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java
index e2e7580462..c1b523ac4e 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java
@@ -47,8 +47,6 @@ import java.util.Optional;
import java.util.concurrent.TimeUnit;
import org.apache.commons.collections4.CollectionUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CompatibilityFactory;
@@ -105,7 +103,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
@@ -115,7 +114,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@Category({ClientTests.class, MediumTests.class})
public class TestThriftHBaseServiceHandler {
- private static final Log LOG = LogFactory.getLog(TestThriftHBaseServiceHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestThriftHBaseServiceHandler.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
// Static names for tables, columns, rows, and values
@@ -1563,7 +1562,7 @@ public class TestThriftHBaseServiceHandler {
}
public static class DelayingRegionObserver implements RegionCoprocessor, RegionObserver {
- private static final Log LOG = LogFactory.getLog(DelayingRegionObserver.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DelayingRegionObserver.class);
// sleep time in msec
private long delayMillis;
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java
index 3e459a6866..89a48bd400 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java
@@ -32,8 +32,6 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -72,12 +70,14 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ClientTests.class, MediumTests.class})
public class TestThriftHBaseServiceHandlerWithLabels {
- private static final Log LOG = LogFactory
- .getLog(TestThriftHBaseServiceHandlerWithLabels.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TestThriftHBaseServiceHandlerWithLabels.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
// Static names for tables, columns, rows, and values
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithReadOnly.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithReadOnly.java
index dfd244ba72..50f7a4b8f8 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithReadOnly.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithReadOnly.java
@@ -30,8 +30,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -64,8 +62,6 @@ import org.junit.experimental.categories.Category;
@Category({ClientTests.class, MediumTests.class})
public class TestThriftHBaseServiceHandlerWithReadOnly {
-
- private static final Log LOG = LogFactory.getLog(TestThriftHBaseServiceHandlerWithReadOnly.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
// Static names for tables, columns, rows, and values
diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml
index d353bff0b1..08e215ca1c 100644
--- a/hbase-zookeeper/pom.xml
+++ b/hbase-zookeeper/pom.xml
@@ -213,8 +213,8 @@
      <artifactId>commons-lang3</artifactId>
    </dependency>
    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
    </dependency>
    <dependency>
      <groupId>log4j</groupId>
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterStatusTracker.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterStatusTracker.java
index 4e9b34c576..730c099c65 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterStatusTracker.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterStatusTracker.java
@@ -18,11 +18,11 @@
*/
package org.apache.hadoop.hbase.zookeeper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Abortable;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos;
@@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos;
*/
@InterfaceAudience.Private
public class ClusterStatusTracker extends ZKNodeTracker {
- private static final Log LOG = LogFactory.getLog(ClusterStatusTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ClusterStatusTracker.class);
/**
* Creates a cluster status tracker.
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/DeletionListener.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/DeletionListener.java
index 7c02891206..ed8751b7a9 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/DeletionListener.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/DeletionListener.java
@@ -21,10 +21,10 @@ package org.apache.hadoop.hbase.zookeeper;
import java.util.concurrent.CountDownLatch;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A ZooKeeper watcher meant to detect deletions of ZNodes.
@@ -32,7 +32,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Private
public class DeletionListener extends ZKListener {
- private static final Log LOG = LogFactory.getLog(DeletionListener.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DeletionListener.class);
private final String pathToWatch;
private final CountDownLatch deletedLatch;
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java
index cddce3319f..d3085b778f 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.hbase.zookeeper;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos;
@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos;
*/
@InterfaceAudience.Private
public class LoadBalancerTracker extends ZKNodeTracker {
- private static final Log LOG = LogFactory.getLog(LoadBalancerTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(LoadBalancerTracker.class);
public LoadBalancerTracker(ZKWatcher watcher,
Abortable abortable) {
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
index 6e231886ad..3d58c411e6 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
@@ -29,8 +29,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Locale;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
@@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
@@ -77,7 +76,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaReg
*/
@InterfaceAudience.Private
public class MetaTableLocator {
- private static final Log LOG = LogFactory.getLog(MetaTableLocator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MetaTableLocator.class);
// only needed to allow non-timeout infinite waits to stop when cluster shuts down
private volatile boolean stopped = false;
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java
index a4acda2c2d..516ff9a65c 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java
@@ -32,14 +32,14 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.server.NIOServerCnxnFactory;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.apache.zookeeper.server.persistence.FileTxnLog;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Public
public class MiniZooKeeperCluster {
- private static final Log LOG = LogFactory.getLog(MiniZooKeeperCluster.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MiniZooKeeperCluster.class);
private static final int TICK_TIME = 2000;
private static final int DEFAULT_CONNECTION_TIMEOUT = 30000;
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
index 4b88a5de07..4c76a5cb68 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
@@ -24,8 +24,6 @@ import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -46,6 +44,8 @@ import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import org.apache.zookeeper.proto.CreateRequest;
import org.apache.zookeeper.proto.SetDataRequest;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* A zookeeper that can handle 'recoverable' errors.
@@ -72,7 +72,7 @@ import org.apache.zookeeper.proto.SetDataRequest;
*/
@InterfaceAudience.Private
public class RecoverableZooKeeper {
- private static final Log LOG = LogFactory.getLog(RecoverableZooKeeper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RecoverableZooKeeper.class);
// the actual ZooKeeper client instance
private ZooKeeper zk;
private final RetryCounterFactory retryCounterFactory;
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
index 58d405d557..a50ce4c8c6 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.zookeeper;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos;
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos;
* Tracks region normalizer state up in ZK
*/
public class RegionNormalizerTracker extends ZKNodeTracker {
- private static final Log LOG = LogFactory.getLog(RegionNormalizerTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegionNormalizerTracker.class);
public RegionNormalizerTracker(ZKWatcher watcher, Abortable abortable) {
super(watcher, watcher.znodePaths.regionNormalizerZNode, abortable);
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAclReset.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAclReset.java
index c0f33e588b..50a6f5e78f 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAclReset.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAclReset.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.zookeeper;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -31,6 +29,8 @@ import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* You may add the jaas.conf option
@@ -44,7 +44,7 @@ import org.apache.zookeeper.ZooKeeper;
*/
@InterfaceAudience.Private
public class ZKAclReset extends Configured implements Tool {
- private static final Log LOG = LogFactory.getLog(ZKAclReset.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKAclReset.class);
private static void resetAcls(final ZKWatcher zkw, final String znode,
final boolean eraseAcls) throws Exception {
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java
index 25310bd50c..5918e68764 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.zookeeper;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Handles coordination of a single "leader" instance among many possible
@@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException;
@Deprecated
@InterfaceAudience.Private
public class ZKLeaderManager extends ZKListener {
- private static final Log LOG = LogFactory.getLog(ZKLeaderManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKLeaderManager.class);
private final Object lock = new Object();
private final AtomicBoolean leaderExists = new AtomicBoolean();
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.java
index 2dd5df5080..f76947b2c8 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.java
@@ -18,11 +18,11 @@
*/
package org.apache.hadoop.hbase.zookeeper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Abortable;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tracks the availability and value of a single ZooKeeper node.
@@ -36,7 +36,7 @@ import org.apache.zookeeper.KeeperException;
@InterfaceAudience.Private
public abstract class ZKNodeTracker extends ZKListener {
// LOG is being used in subclasses, hence keeping it protected
- protected static final Log LOG = LogFactory.getLog(ZKNodeTracker.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(ZKNodeTracker.class);
/** Path of node being tracked */
protected final String node;
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKSplitLog.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKSplitLog.java
index da45f9dc92..b58c98cde6 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKSplitLog.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKSplitLog.java
@@ -22,12 +22,12 @@ import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Common methods and attributes used by SplitLogManager and SplitLogWorker running distributed
@@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public final class ZKSplitLog {
- private static final Log LOG = LogFactory.getLog(ZKSplitLog.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKSplitLog.class);
private ZKSplitLog() {
}
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index a4edbb3507..3ef248d093 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -39,8 +39,6 @@ import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -73,6 +71,8 @@ import org.apache.zookeeper.proto.CreateRequest;
import org.apache.zookeeper.proto.DeleteRequest;
import org.apache.zookeeper.proto.SetDataRequest;
import org.apache.zookeeper.server.ZooKeeperSaslServer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -89,7 +89,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
*/
@InterfaceAudience.Private
public final class ZKUtil {
- private static final Log LOG = LogFactory.getLog(ZKUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKUtil.class);
private static int zkDumpConnectionTimeOut;
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKWatcher.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKWatcher.java
index 5d48117bf1..b0c6a6f0c3 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKWatcher.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKWatcher.java
@@ -27,8 +27,6 @@ import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.AuthUtil;
@@ -44,6 +42,8 @@ import org.apache.zookeeper.ZooDefs.Perms;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Id;
import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Acts as the single ZooKeeper Watcher. One instance of this is instantiated
@@ -58,7 +58,7 @@ import org.apache.zookeeper.data.Stat;
*/
@InterfaceAudience.Private
public class ZKWatcher implements Watcher, Abortable, Closeable {
- private static final Log LOG = LogFactory.getLog(ZKWatcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ZKWatcher.class);
// Identifier for this watcher (for logging only). It is made of the prefix
// passed on construction and the zookeeper sessionid.
diff --git a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java
index fe282f5219..95ffbb437c 100644
--- a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java
+++ b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java
@@ -25,13 +25,12 @@ import static org.junit.Assert.fail;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseZKTestingUtility;
import org.apache.hadoop.hbase.Stoppable;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.ZKTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -39,10 +38,12 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ ZKTests.class, MediumTests.class })
public class TestZKLeaderManager {
- private static final Log LOG = LogFactory.getLog(TestZKLeaderManager.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestZKLeaderManager.class);
private static final String LEADER_ZNODE =
"/test/" + TestZKLeaderManager.class.getSimpleName();
@@ -53,7 +54,7 @@ public class TestZKLeaderManager {
@Override
public void abort(String why, Throwable e) {
aborted = true;
- LOG.fatal("Aborting during test: "+why, e);
+ LOG.error(HBaseMarkers.FATAL, "Aborting during test: "+why, e);
fail("Aborted during test: " + why);
}
@@ -91,6 +92,7 @@ public class TestZKLeaderManager {
return watcher;
}
+ @Override
public void run() {
while (!stopped) {
zkLeader.start();
diff --git a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
index 3cc3815894..e6fe810800 100644
--- a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
+++ b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
@@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseZKTestingUtility;
@@ -43,13 +41,15 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test ZooKeeper multi-update functionality
*/
@Category({ ZKTests.class, MediumTests.class })
public class TestZKMulti {
- private static final Log LOG = LogFactory.getLog(TestZKMulti.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestZKMulti.class);
private final static HBaseZKTestingUtility TEST_UTIL = new HBaseZKTestingUtility();
private static ZKWatcher zkw = null;
diff --git a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKNodeTracker.java b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKNodeTracker.java
index f8aa7c3c66..3778ca0119 100644
--- a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKNodeTracker.java
+++ b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKNodeTracker.java
@@ -28,8 +28,6 @@ import java.io.IOException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.ThreadLocalRandom;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseZKTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -45,10 +43,12 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ ZKTests.class, MediumTests.class })
public class TestZKNodeTracker {
- private static final Log LOG = LogFactory.getLog(TestZKNodeTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestZKNodeTracker.class);
private final static HBaseZKTestingUtility TEST_UTIL = new HBaseZKTestingUtility();
@BeforeClass
@@ -244,7 +244,7 @@ public class TestZKNodeTracker {
}
public static class TestingZKListener extends ZKListener {
- private static final Log LOG = LogFactory.getLog(TestingZKListener.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestingZKListener.class);
private Semaphore deletedLock;
private Semaphore createdLock;
diff --git a/pom.xml b/pom.xml
index fb8aefb9f4..e029b47dfb 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1418,7 +1418,6 @@
    <commons-io.version>2.5</commons-io.version>
    <commons-lang3.version>3.6</commons-lang3.version>
-    <commons-logging.version>1.2</commons-logging.version>
    <commons-math.version>3.6.1</commons-math.version>
    <disruptor.version>3.3.6</disruptor.version>
@@ -1449,7 +1448,7 @@
    <thrift.path>thrift</thrift.path>
    <thrift.version>0.9.3</thrift.version>
    <zookeeper.version>3.4.10</zookeeper.version>
-    <slf4j.version>1.7.24</slf4j.version>
+    <slf4j.version>1.7.25</slf4j.version>
    <clover.version>4.0.3</clover.version>
    <jamon-runtime.version>2.4.1</jamon-runtime.version>
    <jettison.version>1.3.8</jettison.version>
@@ -1820,6 +1819,11 @@
        <artifactId>jettison</artifactId>
        <version>${jettison.version}</version>
      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>slf4j-log4j12</artifactId>
+        <version>${slf4j.version}</version>
+      </dependency>
      <dependency>
        <groupId>log4j</groupId>
        <artifactId>log4j</artifactId>
@@ -1878,11 +1882,6 @@
        <artifactId>commons-lang3</artifactId>
        <version>${commons-lang3.version}</version>
      </dependency>
-      <dependency>
-        <groupId>commons-logging</groupId>
-        <artifactId>commons-logging</artifactId>
-        <version>${commons-logging.version}</version>
-      </dependency>
      <dependency>
        <groupId>org.apache.commons</groupId>
        <artifactId>commons-math3</artifactId>
--
2.15.1