commit 06e335f34f60c4b94c3feb1a8d9959f929eb4371
Author: Enis Soztutar
Date: Tue Mar 4 14:27:54 2014 -0800
HBASE-10671
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
index bf89fe2..c865a53 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
@@ -38,6 +38,7 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
@@ -87,6 +88,7 @@ import com.google.common.annotations.VisibleForTesting;
* gets as well.
*
*/
+@InterfaceAudience.Private
class AsyncProcess {
private static final Log LOG = LogFactory.getLog(AsyncProcess.class);
protected static final AtomicLong COUNTER = new AtomicLong();
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelegatingRetryingCallable.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelegatingRetryingCallable.java
index adfc49b..c229c21 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelegatingRetryingCallable.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelegatingRetryingCallable.java
@@ -19,6 +19,9 @@ package org.apache.hadoop.hbase.client;
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
public class DelegatingRetryingCallable<T, D extends RetryingCallable<T>> implements
    RetryingCallable<T> {
protected final D delegate;
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
index 2bf16eb..efcb9ea 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
@@ -14,13 +14,12 @@
*/
package org.apache.hadoop.hbase.client;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
import java.io.IOException;
import java.util.Date;
import java.util.List;
-import java.util.concurrent.Callable;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
/**
* Exception thrown by HTable methods when an attempt to do something (like
@@ -42,6 +41,7 @@ public class RetriesExhaustedException extends IOException {
/**
* Datastructure that allows adding more info around Throwable incident.
*/
+ @InterfaceAudience.Private
public static class ThrowableWithExtraContext {
private final Throwable t;
private final long when;
@@ -77,6 +77,7 @@ public class RetriesExhaustedException extends IOException {
* @param numTries
* @param exceptions List of exceptions that failed before giving up
*/
+ @InterfaceAudience.Private
   public RetriesExhaustedException(final int numTries,
       final List<ThrowableWithExtraContext> exceptions) {
super(getMessage(numTries, exceptions),
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java
index 7957cc8..eec874a 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.client;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -24,6 +25,7 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
/**
* Factory to create an {@link RpcRetryingCaller}
*/
+@InterfaceAudience.Private
public class RpcRetryingCallerFactory {
/** Configuration key for a custom {@link RpcRetryingCaller} */
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
index 570eda2..0b8f910 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
@@ -17,9 +17,14 @@
*/
package org.apache.hadoop.hbase.exceptions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
/**
* Exception thrown if a mutation fails sanity checks.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class FailedSanityCheckException extends org.apache.hadoop.hbase.DoNotRetryIOException {
private static final long serialVersionUID = 1788783640409186240L;
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LockTimeoutException.java hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LockTimeoutException.java
index 4d6d22a..77a437b 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LockTimeoutException.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LockTimeoutException.java
@@ -19,8 +19,15 @@
*/
package org.apache.hadoop.hbase.exceptions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.DoNotRetryIOException;
+/**
+ * Thrown when there is a timeout when trying to acquire a lock
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class LockTimeoutException extends DoNotRetryIOException {
private static final long serialVersionUID = -1770764924258999825L;
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java
index 59d0c69..570fa97 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java
@@ -19,10 +19,15 @@
package org.apache.hadoop.hbase.exceptions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
/**
* An error requesting an RPC protocol that the server is not serving.
*/
@SuppressWarnings("serial")
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class UnknownProtocolException extends org.apache.hadoop.hbase.DoNotRetryIOException {
  private Class<?> protocol;
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
index 003c2b7..3aa714e 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
@@ -27,6 +27,7 @@ import java.nio.ByteBuffer;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScanner;
@@ -49,6 +50,7 @@ import com.google.protobuf.Message;
/**
* Utility to help ipc'ing.
*/
+@InterfaceAudience.Private
class IPCUtil {
public static final Log LOG = LogFactory.getLog(IPCUtil.class);
/**
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java
index cbf63fc..60e2dac 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java
@@ -18,14 +18,16 @@
package org.apache.hadoop.hbase.ipc;
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
+
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
-import org.apache.hadoop.util.StringUtils;
-
-import java.io.IOException;
/**
* Used for server-side protobuf RPC service invocations. This handler allows
@@ -50,6 +52,7 @@ import java.io.IOException;
*
*
*/
+@InterfaceAudience.Private
public class ServerRpcController implements RpcController {
/**
* The exception thrown within
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java
index 5117388..14a00f8 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java
@@ -19,9 +19,12 @@
package org.apache.hadoop.hbase.ipc;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
+@InterfaceAudience.Private
public class TimeLimitedRpcController implements RpcController {
/**
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 1a0525e..82f8217 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -36,7 +36,7 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableSet;
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -136,6 +136,7 @@ import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
+import com.google.protobuf.HBaseZeroCopyByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
@@ -147,6 +148,7 @@ import com.google.protobuf.TextFormat;
/**
* Protobufs utility.
*/
+@InterfaceAudience.Private // TODO: some clients (Hive, etc) use this class
public final class ProtobufUtil {
private ProtobufUtil() {
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
index 2af664e..66755f2 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
@@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hbase.replication;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.Stoppable;
@@ -26,6 +27,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
/**
* A factory class for instantiating replication objects that deal with replication state.
*/
+@InterfaceAudience.Private
public class ReplicationFactory {
public static ReplicationQueues getReplicationQueues(final ZooKeeperWatcher zk,
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
index fb09102..6258ef8 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
@@ -30,6 +30,7 @@ import java.util.UUID;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.ServerName;
@@ -74,6 +75,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
*
* /hbase/replication/peers/1/tableCFs [Value: "table1; table2:cf1,cf3; table3:cfx,cfy"]
*/
+@InterfaceAudience.Private
public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements ReplicationPeers {
// Map of peer clusters keyed by their id
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
index 0664923..dbb29a1 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
@@ -19,19 +19,21 @@
package org.apache.hadoop.hbase.replication;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.ServerName;
-
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.ServerName;
+
/**
* This class is responsible for the parsing logic for a znode representing a queue.
* It will extract the peerId if it's recovered as well as the dead region servers
* that were part of the queue's history.
*/
+@InterfaceAudience.Private
public class ReplicationQueueInfo {
private static final Log LOG = LogFactory.getLog(ReplicationQueueInfo.class);
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java
index 689afba..f4adfbf 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java
@@ -20,11 +20,14 @@ package org.apache.hadoop.hbase.replication;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* This provides an interface for clients of replication to view replication queues. These queues
* keep track of the HLogs that still need to be replicated to remote clusters.
*/
+@InterfaceAudience.Private
public interface ReplicationQueuesClient {
/**
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
index 97457e4..32f88ef 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
@@ -20,12 +20,14 @@ package org.apache.hadoop.hbase.replication;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
+@InterfaceAudience.Private
public class ReplicationQueuesClientZKImpl extends ReplicationStateZKBase implements
ReplicationQueuesClient {
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
index f5f437e..1ff11a4 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
@@ -27,6 +27,7 @@ import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HConstants;
@@ -35,8 +36,8 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
-import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
+import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
/**
@@ -60,6 +61,7 @@ import org.apache.zookeeper.KeeperException;
*
* /hbase/replication/rs/hostname.example.org,6020,1234/1/23522342.23422 [VALUE: 254]
*/
+@InterfaceAudience.Private
public class ReplicationQueuesZKImpl extends ReplicationStateZKBase implements ReplicationQueues {
/** Znode containing all replication queues for this region server. */
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
index 3e99687..fcc9957 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.replication;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -32,6 +33,7 @@ import org.apache.zookeeper.KeeperException;
/**
* This is a base class for maintaining replication state in zookeeper.
*/
+@InterfaceAudience.Private
public abstract class ReplicationStateZKBase {
/**
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
index 5624ecc..69ea05f 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
@@ -24,6 +24,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.Stoppable;
@@ -37,6 +38,7 @@ import org.apache.zookeeper.KeeperException;
* responsible for handling replication events that are defined in the ReplicationListener
* interface.
*/
+@InterfaceAudience.Private
public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements ReplicationTracker {
private static final Log LOG = LogFactory.getLog(ReplicationTrackerZKImpl.class);
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/AuthMethod.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/AuthMethod.java
index c223fa5..3edd5fb 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/AuthMethod.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/AuthMethod.java
@@ -19,13 +19,15 @@
package org.apache.hadoop.hbase.security;
-import org.apache.hadoop.security.UserGroupInformation;
-
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.security.UserGroupInformation;
+
/** Authentication method */
+@InterfaceAudience.Private
public enum AuthMethod {
SIMPLE((byte) 80, "", UserGroupInformation.AuthenticationMethod.SIMPLE),
KERBEROS((byte) 81, "GSSAPI", UserGroupInformation.AuthenticationMethod.KERBEROS),
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
index 9bbba53..bcc6478 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
@@ -18,14 +18,13 @@
package org.apache.hadoop.hbase.security;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.security.SaslInputStream;
-import org.apache.hadoop.security.SaslOutputStream;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
@@ -38,13 +37,15 @@ import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslException;
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.SaslInputStream;
+import org.apache.hadoop.security.SaslOutputStream;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
import com.google.common.annotations.VisibleForTesting;
@@ -52,6 +53,7 @@ import com.google.common.annotations.VisibleForTesting;
* A utility class that encapsulates SASL logic for RPC client.
* Copied from org.apache.hadoop.security
*/
+@InterfaceAudience.Private
public class HBaseSaslRpcClient {
public static final Log LOG = LogFactory.getLog(HBaseSaslRpcClient.class);
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslStatus.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslStatus.java
index 95ac453..9cfb5cb 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslStatus.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslStatus.java
@@ -19,6 +19,9 @@
package org.apache.hadoop.hbase.security;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
public enum SaslStatus {
SUCCESS (0),
ERROR (1);
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index 42e21be..4fe30da 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -18,11 +18,13 @@
*/
package org.apache.hadoop.hbase.security;
-import org.apache.commons.codec.binary.Base64;
-
import java.util.Map;
import java.util.TreeMap;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
public class SaslUtil {
public static final String SASL_DEFAULT_REALM = "default";
   public static final Map<String, String> SASL_PROPS =
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java
index 41b5cb9..75ab68e 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java
@@ -18,10 +18,12 @@
*/
package org.apache.hadoop.hbase.security;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.util.Bytes;
+@InterfaceAudience.Private
public class SecureBulkLoadUtil {
private final static String BULKLOAD_STAGING_DIR = "hbase.bulkload.staging.dir";
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
index 0dbcd42..9f0f150 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
@@ -29,6 +30,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos;
/**
* Maps RPC protocol interfaces to required configuration
*/
+@InterfaceAudience.Private
public class SecurityInfo {
/** Maps RPC service names to authentication information */
   private static ConcurrentMap<String, SecurityInfo> infos = new ConcurrentHashMap<String, SecurityInfo>();
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
index 3082526..cfa6b0c 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
@@ -18,26 +18,34 @@
package org.apache.hadoop.hbase.security.access;
-import com.google.common.collect.Maps;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.VersionedWritable;
-
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.VersionedWritable;
+
+import com.google.common.collect.Maps;
+
/**
* Base permissions instance representing the ability to perform a given set
* of actions.
*
* @see TablePermission
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class Permission extends VersionedWritable {
protected static final byte VERSION = 0;
+
+ @InterfaceAudience.Public
+ @InterfaceStability.Evolving
public enum Action {
READ('R'), WRITE('W'), EXEC('X'), CREATE('C'), ADMIN('A');
@@ -143,6 +151,7 @@ public class Permission extends VersionedWritable {
return result;
}
+ @Override
public String toString() {
StringBuilder str = new StringBuilder("[Permission: ")
.append("actions=");
@@ -162,6 +171,7 @@ public class Permission extends VersionedWritable {
}
/** @return the object version number */
+ @Override
public byte getVersion() {
return VERSION;
}
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
index 55ff81b..4954c17 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
@@ -18,22 +18,26 @@
package org.apache.hadoop.hbase.security.access;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
/**
* Represents an authorization for access for the given actions, optionally
* restricted to the given column family or column qualifier, over the
* given table. If the family property is null, it implies
* full table access.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class TablePermission extends Permission {
private static Log LOG = LogFactory.getLog(TablePermission.class);
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
index eaaa824..bc54b8d 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
@@ -18,19 +18,23 @@
package org.apache.hadoop.hbase.security.access;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
/**
* Represents an authorization for access over the given table, column family
* plus qualifier, for the given user.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class UserPermission extends TablePermission {
private static Log LOG = LogFactory.getLog(UserPermission.class);
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
index 1445e04..df55f63 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
@@ -18,19 +18,22 @@
package org.apache.hadoop.hbase.security.token;
-import com.google.protobuf.ByteString;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.TokenIdentifier;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
+import com.google.protobuf.ByteString;
/**
* Represents the identity information stored in an HBase authentication token.
*/
+@InterfaceAudience.Private
public class AuthenticationTokenIdentifier extends TokenIdentifier {
public static final Text AUTH_TOKEN_TYPE = new Text("HBASE_AUTH_TOKEN");
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java
index 53e8fb7..870cacb 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java
@@ -18,15 +18,17 @@
package org.apache.hadoop.hbase.security.token;
+import java.util.Collection;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.security.token.TokenSelector;
-import java.util.Collection;
-
+@InterfaceAudience.Private
public class AuthenticationTokenSelector
implements TokenSelector<AuthenticationTokenIdentifier> {
private static Log LOG = LogFactory.getLog(AuthenticationTokenSelector.class);
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index 78cf048..e5bb8c6 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -35,7 +35,6 @@ import java.util.Properties;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
-import com.google.protobuf.HBaseZeroCopyByteString;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -48,7 +47,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionStoreSequenceIds;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ExceptionUtil;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp.CreateAndFailSilent;
import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp.DeleteNodeFailSilent;
@@ -71,6 +69,7 @@ import org.apache.zookeeper.proto.DeleteRequest;
import org.apache.zookeeper.proto.SetDataRequest;
import org.apache.zookeeper.server.ZooKeeperSaslServer;
+import com.google.protobuf.HBaseZeroCopyByteString;
import com.google.protobuf.InvalidProtocolBufferException;
/**
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java
index 8ad8584..a3d0414 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
@@ -26,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
* only sets the configuration through the {@link #setConf(Configuration)}
* method
*/
+@InterfaceAudience.Private
public class BaseConfigurable implements Configurable {
private Configuration conf;
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 664ec6f..656d235 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -84,6 +84,7 @@ public final class HConstants {
/**
* Status codes used for return values of bulk operations.
*/
+ @InterfaceAudience.Private
public enum OperationStatusCode {
NOT_RUN,
SUCCESS,
@@ -520,6 +521,7 @@ public final class HConstants {
public static final String REGION_IMPL = "hbase.hregion.impl";
/** modifyTable op for replacing the table descriptor */
+ @InterfaceAudience.Private
public static enum Modify {
CLOSE_REGION,
TABLE_COMPACT,
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/MetaMutationAnnotation.java hbase-common/src/main/java/org/apache/hadoop/hbase/MetaMutationAnnotation.java
index 1033f40..e197c01 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/MetaMutationAnnotation.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/MetaMutationAnnotation.java
@@ -25,6 +25,8 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* The field or the parameter to which this annotation can be applied only when it
* holds mutations for hbase:meta table.
@@ -32,6 +34,7 @@ import java.lang.annotation.Target;
@Documented
@Target( { ElementType.LOCAL_VARIABLE, ElementType.PARAMETER })
@Retention(RetentionPolicy.CLASS)
+@InterfaceAudience.Private
public @interface MetaMutationAnnotation {
}
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
index 5ccb49e..29db2c0 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
@@ -18,10 +18,6 @@
package org.apache.hadoop.hbase;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.util.Bytes;
-
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
@@ -30,6 +26,10 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Bytes;
+
/**
* Namespace POJO class. Used to represent and define namespaces.
*
@@ -161,6 +161,8 @@ public class NamespaceDescriptor {
return new Builder(ns);
}
+ @InterfaceAudience.Public
+ @InterfaceStability.Evolving
public static class Builder {
private String bName;
private Map<String, String> bConfiguration = new TreeMap<String, String>();
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
index 7a13392..ea77f78 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hbase.io;
-import java.io.IOException;
import java.io.DataInput;
import java.io.DataOutput;
+import java.io.IOException;
import java.util.Arrays;
import java.util.List;
@@ -228,6 +228,8 @@ implements WritableComparable<ImmutableBytesWritable> {
/** A Comparator optimized for ImmutableBytesWritable.
*/
+ @InterfaceAudience.Public
+ @InterfaceStability.Stable
public static class Comparator extends WritableComparator {
private BytesWritable.Comparator comparator =
new BytesWritable.Comparator();
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
index 8a21f9e..c3a0915 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
@@ -19,18 +19,21 @@
package org.apache.hadoop.hbase.io;
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
+import org.apache.hadoop.classification.InterfaceAudience;
/**
* Copied from guava source code v15 (LimitedInputStream)
* Guava deprecated LimitInputStream in v14 and removed it in v15. Copying this class here
* allows to be compatible with guava 11 to 15+.
*/
+@InterfaceAudience.Private
public final class LimitInputStream extends FilterInputStream {
private long left;
private long mark = -1;
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
index 8dffb66..f072b8d 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
@@ -52,6 +52,8 @@ public final class Encryption {
/**
* Crypto context
*/
+ @InterfaceAudience.Public
+ @InterfaceStability.Evolving
public static class Context extends org.apache.hadoop.hbase.io.crypto.Context {
/** The null crypto context */
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
index 9d04f3a..2daf825 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.security;
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.BaseConfigurable;
import org.apache.hadoop.security.UserGroupInformation;
@@ -27,6 +28,7 @@ import org.apache.hadoop.util.ReflectionUtils;
/**
* Provide an instance of a user. Allows custom {@link User} creation.
*/
+@InterfaceAudience.Private
public class UserProvider extends BaseConfigurable {
private static final String USER_PROVIDER_CONF_KEY = "hbase.client.userprovider.class";
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
index 2a9e498..56943a3 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
@@ -19,11 +19,6 @@
package org.apache.hadoop.hbase.util;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
@@ -43,6 +38,11 @@ import java.io.UnsupportedEncodingException;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
/**
* Encodes and decodes to and from Base64 notation.
*
@@ -117,8 +117,7 @@ import java.util.zip.GZIPOutputStream;
*
* version: 2.2.1
*/
-@InterfaceAudience.Public
-@InterfaceStability.Stable
+@InterfaceAudience.Private
public class Base64 {
/* ******** P U B L I C F I E L D S ******** */
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java
index 4fc09e1..c8f20bc 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java
@@ -20,9 +20,10 @@
package org.apache.hadoop.hbase.util;
import java.io.IOException;
-import java.lang.ClassNotFoundException;
-import java.util.zip.Checksum;
import java.lang.reflect.Constructor;
+import java.util.zip.Checksum;
+
+import org.apache.hadoop.classification.InterfaceAudience;
/**
* Utility class that is used to generate a Checksum object.
@@ -30,6 +31,7 @@ import java.lang.reflect.Constructor;
* can specify their own class that implements their own
* Checksum algorithm.
*/
+@InterfaceAudience.Private
public class ChecksumFactory {
static private final Class<?>[] EMPTY_ARRAY = new Class[]{};
@@ -52,7 +54,7 @@ public class ChecksumFactory {
* @param className classname for which an constructor is created
* @return a new Constructor object
*/
- static public Constructor<? extends Checksum> newConstructor(String className)
+ static public Constructor<? extends Checksum> newConstructor(String className)
throws IOException {
try {
Class<? extends Checksum> clazz = getClassByName(className);
@@ -89,7 +91,7 @@ public class ChecksumFactory {
* @return the class object.
* @throws ClassNotFoundException if the class is not found.
*/
- static private Class<? extends Checksum> getClassByName(String name)
+ static private Class<? extends Checksum> getClassByName(String name)
throws ClassNotFoundException {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
return Class.forName(name, true, classLoader);
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java
index e160dac..77ab187 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java
@@ -24,13 +24,15 @@ import java.util.zip.Checksum;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
/**
* Checksum types. The Checksum type is a one byte number
* that stores a representation of the checksum algorithm
- * used to encode a hfile. The ordinal of these cannot
+ * used to encode a hfile. The ordinal of these cannot
* change or else you risk breaking all existing HFiles out there.
*/
+@InterfaceAudience.Private
public enum ChecksumType {
NULL((byte)0) {
@@ -70,7 +72,7 @@ public enum ChecksumType {
LOG.trace(PURECRC32 + " not available.");
}
try {
- // The default checksum class name is java.util.zip.CRC32.
+ // The default checksum class name is java.util.zip.CRC32.
// This is available on all JVMs.
if (ctor == null) {
ctor = ChecksumFactory.newConstructor(JDKCRC);
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java
index cf8c191..e3a666f 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java
@@ -21,16 +21,18 @@ package org.apache.hadoop.hbase.util;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* A collection class that contains multiple sub-lists, which allows us to not copy lists.
* This class does not support modification. The derived classes that add modifications are
* not thread-safe.
* NOTE: Doesn't implement list as it is not necessary for current usage, feel free to add.
*/
+@InterfaceAudience.Private
public class ConcatenatedLists<T> implements Collection<T> {
protected final ArrayList<List<T>> components = new ArrayList<List<T>>();
protected int size = 0;
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExceptionUtil.java hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExceptionUtil.java
index 35bb259..39d44b4 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExceptionUtil.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExceptionUtil.java
@@ -20,7 +20,8 @@ package org.apache.hadoop.hbase.util;
import java.io.InterruptedIOException;
import java.net.SocketTimeoutException;
-import java.nio.channels.ClosedByInterruptException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
/**
* This class handles the different interruption classes.
@@ -31,6 +32,7 @@ import java.nio.channels.ClosedByInterruptException;
* - SocketTimeoutException inherits InterruptedIOException but is not a real
* interruption, so we have to distinguish the case. This pattern is unfortunately common.
*/
+@InterfaceAudience.Private
public class ExceptionUtil {
/**
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/util/MurmurHash3.java hbase-common/src/main/java/org/apache/hadoop/hbase/util/MurmurHash3.java
index cb96f3e..76862c2 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/util/MurmurHash3.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/util/MurmurHash3.java
@@ -18,6 +18,9 @@
*/
package org.apache.hadoop.hbase.util;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
/**
* This is a very fast, non-cryptographic hash suitable for general hash-based
* lookup. See http://code.google.com/p/smhasher/wiki/MurmurHash3 for details.
@@ -25,6 +28,8 @@ package org.apache.hadoop.hbase.util;
* MurmurHash3 is the successor to MurmurHash2. It comes in 3 variants, and
* the 32-bit version targets low latency for hash table use.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class MurmurHash3 extends Hash {
private static MurmurHash3 _instance = new MurmurHash3();
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
index 3282213..9e8e891 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
@@ -22,6 +22,7 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.zip.Checksum;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ChecksumType;
@@ -29,6 +30,7 @@ import org.apache.hadoop.hbase.util.ChecksumType;
/**
* Utility methods to compute and validate checksums.
*/
+@InterfaceAudience.Private
public class ChecksumUtil {
/** This is used to reserve space in a byte buffer */