diff --git bin/hbase bin/hbase
index 4eeb145..c9354b9 100755
--- bin/hbase
+++ bin/hbase
@@ -81,8 +81,10 @@ if [ $# = 0 ]; then
echo " zkcli run the ZooKeeper shell"
echo ""
echo "PROCESS MANAGEMENT"
- echo " master run an HBase HMaster node"
+ echo " master run an HBase HMaster node"
+ echo " nmaster run a Netty HBase HMaster node"
echo " regionserver run an HBase HRegionServer node"
+ echo " nregionserver run a Netty HBase HRegionServer node"
echo " zookeeper run a Zookeeper server"
echo " rest run an HBase REST server"
echo " thrift run the HBase Thrift server"
@@ -293,11 +295,21 @@ elif [ "$COMMAND" = "master" ] ; then
if [ "$1" != "stop" ] ; then
HBASE_OPTS="$HBASE_OPTS $HBASE_MASTER_OPTS"
fi
+elif [ "$COMMAND" = "nmaster" ] ; then
+ CLASS='org.hbase.server.master.NMasterServer'
+ if [ "$1" != "stop" ] ; then
+ HBASE_OPTS="$HBASE_OPTS $HBASE_MASTER_OPTS"
+ fi
elif [ "$COMMAND" = "regionserver" ] ; then
CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer'
if [ "$1" != "stop" ] ; then
HBASE_OPTS="$HBASE_OPTS $HBASE_REGIONSERVER_OPTS"
fi
+elif [ "$COMMAND" = "nregionserver" ] ; then
+ CLASS='org.hbase.server.regionserver.NRegionServer'
+ if [ "$1" != "stop" ] ; then
+ HBASE_OPTS="$HBASE_OPTS $HBASE_REGIONSERVER_OPTS"
+ fi
elif [ "$COMMAND" = "thrift" ] ; then
CLASS='org.apache.hadoop.hbase.thrift.ThriftServer'
if [ "$1" != "stop" ] ; then
diff --git conf/log4j.properties conf/log4j.properties
index 5ed686a..0e5424d 100644
--- conf/log4j.properties
+++ conf/log4j.properties
@@ -78,6 +78,7 @@ log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}:
log4j.logger.org.apache.zookeeper=INFO
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.hbase=DEBUG
+log4j.logger.org.hbase=DEBUG
# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher=INFO
diff --git hbase-netty/pom.xml hbase-netty/pom.xml
new file mode 100644
index 0000000..1227120
--- /dev/null
+++ hbase-netty/pom.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>0.95-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+
+  <artifactId>hbase-netty</artifactId>
+  <name>HBase - Netty</name>
+  <description>Netty HBase server and client</description>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>secondPartTestsExecution</id>
+            <phase>test</phase>
+            <goals>
+              <goal>test</goal>
+            </goals>
+            <configuration>
+              <skip>true</skip>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.inject</groupId>
+      <artifactId>guice</artifactId>
+    </dependency>
+  </dependencies>
+</project>
diff --git hbase-netty/src/main/java/org/hbase/client/HBaseClient.java hbase-netty/src/main/java/org/hbase/client/HBaseClient.java
new file mode 100644
index 0000000..5986db8
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/client/HBaseClient.java
@@ -0,0 +1,12 @@
+package org.hbase.client;
+
+import org.hbase.client.async.AsyncHBaseClient;
+
+/**
+ * Synchronous entry point to the Netty-based client.  For now it only holds the
+ * {@link AsyncHBaseClient}; the blocking API is still to come.
+ */
+public class HBaseClient {
+
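+  /** The non-blocking client this class will delegate to once it has an API. */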
+ private AsyncHBaseClient asyncClient;
+
+}
diff --git hbase-netty/src/main/java/org/hbase/client/Request.java hbase-netty/src/main/java/org/hbase/client/Request.java
new file mode 100644
index 0000000..6fc904f
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/client/Request.java
@@ -0,0 +1,65 @@
+package org.hbase.client;
+
+import org.apache.hadoop.hbase.client.Operation;
+import org.apache.hadoop.hbase.client.Scan;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * A batch of client operations keyed by table name.  Each request carries a
+ * unique id so it can be matched with its {@link Response}.
+ */
+public class Request {
+
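+  /** Source of request ids, shared by all requests created in this JVM. */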
+ private static AtomicLong counter = new AtomicLong(0);
+
+  private Map<String, List<Operation>> operations;
+
+ private long id = counter.getAndAdd(1);
+ int opCount = 0;
+
+ public Request() {
+    operations = new HashMap<String, List<Operation>>();
+ }
+
+ public long getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ Request request = (Request) o;
+
+ if (id != request.id) return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return (int) (id ^ (id >>> 32));
+ }
+
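+  /**
+   * Add an operation against the given table.  Only a single operation per
+   * request is supported so far; a second call throws.
+   */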
+ public void addOperation(String tableName, Operation op) throws Exception {
+ if (opCount > 0) {
+ throw new RuntimeException("Can only handle a single scan operation");
+ }
+ opCount++;
+    List<Operation> tableOps = operations.get(tableName);
+    if (tableOps == null) {
+      tableOps = new ArrayList<Operation>();
+      operations.put(tableName, tableOps);
+ }
+ tableOps.add(op);
+ }
+
+  public Map<String, List<Operation>> getOperations() {
+ return operations;
+ }
+}
diff --git hbase-netty/src/main/java/org/hbase/client/Response.java hbase-netty/src/main/java/org/hbase/client/Response.java
new file mode 100644
index 0000000..dd04da4
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/client/Response.java
@@ -0,0 +1,13 @@
+package org.hbase.client;
+
+/**
+ * Reply to a {@link Request}; carries the id of the request it answers.
+ */
+public class Response {
+
+ private long requestId;
+
+ public Response(long requestId) {
+ this.requestId = requestId;
+ }
+}
diff --git hbase-netty/src/main/java/org/hbase/client/async/AsyncHBaseClient.java hbase-netty/src/main/java/org/hbase/client/async/AsyncHBaseClient.java
new file mode 100644
index 0000000..91279fc
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/client/async/AsyncHBaseClient.java
@@ -0,0 +1,112 @@
+package org.hbase.client.async;
+
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.client.Action;
+import org.apache.hadoop.hbase.client.Operation;
+import org.apache.hadoop.hbase.client.Row;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.protobuf.RequestConverter;
+import org.hbase.client.Request;
+import org.hbase.client.Response;
+import org.hbase.protobuf.generated.DataProtocol;
+import org.jboss.netty.bootstrap.ClientBootstrap;
+import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+/**
+ * Non-blocking HBase client built on Netty.  Converts a {@link Request} into
+ * per-region protobuf messages and hands them to {@link AsyncRegionClient}s.
+ *
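+ * <p>Intended usage, sketched under the assumption that region lookups and
+ * response futures get wired up (the table name below is hypothetical):
+ * <pre>
+ *   AsyncHBaseClient client = new AsyncHBaseClient();
+ *   Request req = new Request();
+ *   req.addOperation("myTable", new Scan());
+ *   Future&lt;Response&gt; pending = client.request(req);
+ * </pre>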
+ */
+public class AsyncHBaseClient {
+
+  private final ConcurrentHashMap<HRegionInfo, AsyncRegionClient> regionClients =
+      new ConcurrentHashMap<HRegionInfo, AsyncRegionClient>();
+ private ClientBootstrap clientBootstrap;
+  private AsyncRegionClient tempClient; // Only used for testing until meta lookups work.
+
+ public AsyncHBaseClient() {
+ clientBootstrap = new ClientBootstrap(
+ new NioClientSocketChannelFactory(
+ Executors.newCachedThreadPool(),
+ Executors.newCachedThreadPool()));
+ clientBootstrap.setPipelineFactory(new HBaseClientPipelineFactory());
+ }
+
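+  /**
+   * Convert the request into per-region protobuf messages and enqueue each one
+   * on the client for its region.  Response futures are not wired up yet, so
+   * this currently returns null.
+   */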
+  public Future<Response> request(Request request) throws Exception {
+
+    Map<HRegionInfo, DataProtocol.Request> pRequests = getProtobufRequests(request);
+    for (Map.Entry<HRegionInfo, DataProtocol.Request> entry : pRequests.entrySet()) {
+ AsyncRegionClient client = getRegionClient(entry.getKey());
+ client.enqueue(entry.getValue());
+ }
+ return null;
+ }
+
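+  /**
+   * Group the request's operations by region and build one protobuf request per
+   * region: row operations become a MultiRequest via RequestConverter, scans are
+   * collected but not yet converted (TODO).
+   */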
+  private Map<HRegionInfo, DataProtocol.Request> getProtobufRequests(Request request)
+      throws Exception {
+    Map<HRegionInfo, List<Operation>> operations = new HashMap<HRegionInfo, List<Operation>>();
+
+    for (Map.Entry<String, List<Operation>> entry : request.getOperations().entrySet()) {
+ for (Operation op : entry.getValue()) {
+
+ HRegionInfo hri = getHRegionInfo(entry.getKey(), op);
+        List<Operation> regionOps = operations.get(hri);
+        if (regionOps == null) {
+          regionOps = new ArrayList<Operation>(5);
+          operations.put(hri, regionOps);
+ }
+ regionOps.add(op);
+ }
+ }
+    Map<HRegionInfo, DataProtocol.Request> requests =
+        new HashMap<HRegionInfo, DataProtocol.Request>();
+    for (Map.Entry<HRegionInfo, List<Operation>> entry : operations.entrySet()) {
+ DataProtocol.Request.Builder reqBuilder = DataProtocol.Request.newBuilder();
+
+      List<Action> regionActions = new ArrayList<Action>();
+      List<Scan> regionScans = new ArrayList<Scan>();
+      List<Operation> regionOperations = entry.getValue();
+ for (int i = 0; i < regionOperations.size(); i++) {
+ Operation op = regionOperations.get(i);
+ if (op instanceof Row) {
+ regionActions.add(new Action((Row) op, i));
+ } else if (op instanceof Scan) {
+ regionScans.add((Scan) op);
+ }
+
+ }
+
+ if (regionActions.size() > 0) {
+ reqBuilder.setMultiRequest(
+ RequestConverter.buildMultiRequestUntyped(
+ entry.getKey().getRegionName(),
+ regionActions));
+ }
+
+ for (Scan s : regionScans) {
+ //TODO: Implement this.
+ }
+
+ requests.put(entry.getKey(), reqBuilder.build());
+
+ }
+ return requests;
+
+ }
+
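+  /** Placeholder lookup: treats the table name as the region name until real META lookups exist. */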
+ private HRegionInfo getHRegionInfo(String key, Operation op) {
+ return new HRegionInfo(key.getBytes());
+ }
+
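+  /** Stub: should return (or create via the bootstrap) the AsyncRegionClient for this region. */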
+ private AsyncRegionClient getRegionClient(HRegionInfo hri) {
+ return null;
+
+ }
+
+}
diff --git hbase-netty/src/main/java/org/hbase/client/async/AsyncRegionClient.java hbase-netty/src/main/java/org/hbase/client/async/AsyncRegionClient.java
new file mode 100644
index 0000000..48e5213
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/client/async/AsyncRegionClient.java
@@ -0,0 +1,46 @@
+package org.hbase.client.async;
+
+import org.hbase.protobuf.generated.DataProtocol;
+import org.jboss.netty.channel.ChannelFuture;
+import org.jboss.netty.channel.ChannelFutureListener;
+
+/**
+ * Client-side connection to a single region.  Requests may be enqueued before
+ * the connect future completes; they are written once the channel is up.
+ */
+public class AsyncRegionClient {
+
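+  /** Set by the connect listener once the channel is up; null until then. */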
+ ClientHandler handler = null;
+ ChannelFuture channelFuture;
+
+ public AsyncRegionClient(ChannelFuture channelFuture) {
+ this.channelFuture = channelFuture;
+ channelFuture.addListener(new ChannelFutureListener() {
+ @Override
+ public void operationComplete(ChannelFuture future) throws Exception {
+ handler = future.getChannel().getPipeline().get(ClientHandler.class);
+ }
+ });
+ }
+
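+  /**
+   * Write the request now if the channel is already connected, otherwise queue
+   * it on the connect future so it is written when the connection completes.
+   */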
+ public void enqueue(DataProtocol.Request pRequest) {
+ if (handler == null) {
+      this.channelFuture.addListener(new AsyncHBaseHandlerListener(pRequest));
+ } else {
+ this.handler.enqueue(pRequest);
+ }
+ }
+
+  public class AsyncHBaseHandlerListener implements ChannelFutureListener {
+
+    DataProtocol.Request pRequest;
+
+    public AsyncHBaseHandlerListener(DataProtocol.Request pRequest) {
+      this.pRequest = pRequest;
+    }
+
+ @Override
+ public void operationComplete(ChannelFuture future) throws Exception {
+      // Channel is now connected: grab the pipeline's handler (as the constructor's
+      // listener does) and send the queued request.
+      handler = future.getChannel().getPipeline().get(ClientHandler.class);
+      handler.enqueue(pRequest);
+ }
+ }
+}
diff --git hbase-netty/src/main/java/org/hbase/client/async/ClientHandler.java hbase-netty/src/main/java/org/hbase/client/async/ClientHandler.java
new file mode 100644
index 0000000..94a1f5f
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/client/async/ClientHandler.java
@@ -0,0 +1,27 @@
+package org.hbase.client.async;
+
+import org.hbase.protobuf.generated.DataProtocol;
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.ChannelStateEvent;
+import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+
+/**
+ * Last upstream handler in the client pipeline: holds the open channel and
+ * writes outgoing requests to it.
+ */
+public class ClientHandler extends SimpleChannelUpstreamHandler {
+
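+  /** The connected channel; assigned when channelOpen fires. */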
+ private volatile Channel channel;
+
+ public void enqueue(DataProtocol.Request request) {
+ channel.write(request);
+ }
+
+ @Override
+ public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e)
+ throws Exception {
+ channel = e.getChannel();
+ super.channelOpen(ctx, e);
+ }
+
+}
diff --git hbase-netty/src/main/java/org/hbase/client/async/HBaseClientPipelineFactory.java hbase-netty/src/main/java/org/hbase/client/async/HBaseClientPipelineFactory.java
new file mode 100644
index 0000000..5494b28
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/client/async/HBaseClientPipelineFactory.java
@@ -0,0 +1,56 @@
+package org.hbase.client.async;
+
+import org.hbase.protobuf.generated.DataProtocol;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.handler.codec.oneone.OneToOneDecoder;
+import org.jboss.netty.handler.codec.oneone.OneToOneEncoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufDecoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufEncoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;
+import org.jboss.netty.handler.execution.ExecutionHandler;
+import org.jboss.netty.handler.execution.OrderedMemoryAwareThreadPoolExecutor;
+
+import java.util.concurrent.TimeUnit;
+
+import static org.jboss.netty.channel.Channels.pipeline;
+
+/**
+ * Builds the client pipeline: varint32 framing, protobuf encode/decode, a thread
+ * pool hop, and finally the {@link ClientHandler}.
+ */
+public class HBaseClientPipelineFactory implements ChannelPipelineFactory {
+
+ /** The decoder that will take bytes and give out protocol buf messages */
+ private OneToOneDecoder protoDecoder =
+ new ProtobufDecoder(DataProtocol.Response.getDefaultInstance());
+
+ /** The thread pool for hregions blocking reads/writes */
+ private OrderedMemoryAwareThreadPoolExecutor eventExecutor =
+ new OrderedMemoryAwareThreadPoolExecutor(45, 1000000, 10000000, 100, TimeUnit.MILLISECONDS);
+
+  /** The execution wrapper of the above thread pool. */
+ private ExecutionHandler exHandler = new ExecutionHandler(eventExecutor);
+
+ /** The handler that will run on the above. */
+ private ClientHandler clientHandler = new ClientHandler();
+
+  /** Given a protobuf message, give back bytes. */
+ private OneToOneEncoder protoEncoder = new ProtobufEncoder();
+
+ /** Prepend the bytes with a length. */
+ private OneToOneEncoder frameEncoder =
+ new ProtobufVarint32LengthFieldPrepender();
+
+ @Override
+ public ChannelPipeline getPipeline() throws Exception {
+ ChannelPipeline p = pipeline();
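+    // Inbound: split varint32-delimited frames, then decode protobufs.
+    // Outbound: encode protobufs, then prepend the varint32 length.
+    // The execution handler runs ClientHandler's upstream events on the thread
+    // pool rather than on the Netty I/O threads.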
+ p.addLast("frameDecoder", new ProtobufVarint32FrameDecoder());
+ p.addLast("protobufDecoder", protoDecoder);
+ p.addLast("frameEncoder", frameEncoder);
+ p.addLast("protobufEncoder", protoEncoder);
+ p.addLast("threadPoolExecutor", exHandler);
+ p.addLast("handler", clientHandler);
+ return p;
+ }
+}
diff --git hbase-netty/src/main/java/org/hbase/protobuf/generated/AdminProtocol.java hbase-netty/src/main/java/org/hbase/protobuf/generated/AdminProtocol.java
new file mode 100644
index 0000000..6c8b866
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/protobuf/generated/AdminProtocol.java
@@ -0,0 +1,825 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: AdminProtocol.proto
+
+package org.hbase.protobuf.generated;
+
+public final class AdminProtocol {
+ private AdminProtocol() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface RequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required uint64 requestId = 1;
+ boolean hasRequestId();
+ long getRequestId();
+ }
+ public static final class Request extends
+ com.google.protobuf.GeneratedMessage
+ implements RequestOrBuilder {
+ // Use Request.newBuilder() to construct.
+ private Request(Builder builder) {
+ super(builder);
+ }
+ private Request(boolean noInit) {}
+
+ private static final Request defaultInstance;
+ public static Request getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public Request getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.hbase.protobuf.generated.AdminProtocol.internal_static_Request_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.hbase.protobuf.generated.AdminProtocol.internal_static_Request_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required uint64 requestId = 1;
+ public static final int REQUESTID_FIELD_NUMBER = 1;
+ private long requestId_;
+ public boolean hasRequestId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public long getRequestId() {
+ return requestId_;
+ }
+
+ private void initFields() {
+ requestId_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRequestId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, requestId_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, requestId_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.hbase.protobuf.generated.AdminProtocol.Request)) {
+ return super.equals(obj);
+ }
+ org.hbase.protobuf.generated.AdminProtocol.Request other = (org.hbase.protobuf.generated.AdminProtocol.Request) obj;
+
+ boolean result = true;
+ result = result && (hasRequestId() == other.hasRequestId());
+ if (hasRequestId()) {
+ result = result && (getRequestId()
+ == other.getRequestId());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRequestId()) {
+ hash = (37 * hash) + REQUESTID_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getRequestId());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.hbase.protobuf.generated.AdminProtocol.Request parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Request parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Request parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Request parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Request parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Request parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Request parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Request parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Request parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Request parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.hbase.protobuf.generated.AdminProtocol.Request prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+      com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.hbase.protobuf.generated.AdminProtocol.RequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.hbase.protobuf.generated.AdminProtocol.internal_static_Request_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.hbase.protobuf.generated.AdminProtocol.internal_static_Request_fieldAccessorTable;
+ }
+
+ // Construct using org.hbase.protobuf.generated.AdminProtocol.Request.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ requestId_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.hbase.protobuf.generated.AdminProtocol.Request.getDescriptor();
+ }
+
+ public org.hbase.protobuf.generated.AdminProtocol.Request getDefaultInstanceForType() {
+ return org.hbase.protobuf.generated.AdminProtocol.Request.getDefaultInstance();
+ }
+
+ public org.hbase.protobuf.generated.AdminProtocol.Request build() {
+ org.hbase.protobuf.generated.AdminProtocol.Request result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.hbase.protobuf.generated.AdminProtocol.Request buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.hbase.protobuf.generated.AdminProtocol.Request result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.hbase.protobuf.generated.AdminProtocol.Request buildPartial() {
+ org.hbase.protobuf.generated.AdminProtocol.Request result = new org.hbase.protobuf.generated.AdminProtocol.Request(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.requestId_ = requestId_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.hbase.protobuf.generated.AdminProtocol.Request) {
+ return mergeFrom((org.hbase.protobuf.generated.AdminProtocol.Request)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.hbase.protobuf.generated.AdminProtocol.Request other) {
+ if (other == org.hbase.protobuf.generated.AdminProtocol.Request.getDefaultInstance()) return this;
+ if (other.hasRequestId()) {
+ setRequestId(other.getRequestId());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRequestId()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ requestId_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required uint64 requestId = 1;
+ private long requestId_ ;
+ public boolean hasRequestId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public long getRequestId() {
+ return requestId_;
+ }
+ public Builder setRequestId(long value) {
+ bitField0_ |= 0x00000001;
+ requestId_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearRequestId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ requestId_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:Request)
+ }
+
+ static {
+ defaultInstance = new Request(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:Request)
+ }
+
+ public interface ResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required uint64 requestId = 1;
+ boolean hasRequestId();
+ long getRequestId();
+ }
+ public static final class Response extends
+ com.google.protobuf.GeneratedMessage
+ implements ResponseOrBuilder {
+ // Use Response.newBuilder() to construct.
+ private Response(Builder builder) {
+ super(builder);
+ }
+ private Response(boolean noInit) {}
+
+ private static final Response defaultInstance;
+ public static Response getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public Response getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.hbase.protobuf.generated.AdminProtocol.internal_static_Response_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.hbase.protobuf.generated.AdminProtocol.internal_static_Response_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required uint64 requestId = 1;
+ public static final int REQUESTID_FIELD_NUMBER = 1;
+ private long requestId_;
+ public boolean hasRequestId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public long getRequestId() {
+ return requestId_;
+ }
+
+ private void initFields() {
+ requestId_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRequestId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, requestId_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, requestId_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.hbase.protobuf.generated.AdminProtocol.Response)) {
+ return super.equals(obj);
+ }
+ org.hbase.protobuf.generated.AdminProtocol.Response other = (org.hbase.protobuf.generated.AdminProtocol.Response) obj;
+
+ boolean result = true;
+ result = result && (hasRequestId() == other.hasRequestId());
+ if (hasRequestId()) {
+ result = result && (getRequestId()
+ == other.getRequestId());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRequestId()) {
+ hash = (37 * hash) + REQUESTID_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getRequestId());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.hbase.protobuf.generated.AdminProtocol.Response parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Response parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Response parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Response parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Response parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Response parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Response parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Response parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Response parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.hbase.protobuf.generated.AdminProtocol.Response parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.hbase.protobuf.generated.AdminProtocol.Response prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+      com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.hbase.protobuf.generated.AdminProtocol.ResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.hbase.protobuf.generated.AdminProtocol.internal_static_Response_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.hbase.protobuf.generated.AdminProtocol.internal_static_Response_fieldAccessorTable;
+ }
+
+ // Construct using org.hbase.protobuf.generated.AdminProtocol.Response.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ requestId_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.hbase.protobuf.generated.AdminProtocol.Response.getDescriptor();
+ }
+
+ public org.hbase.protobuf.generated.AdminProtocol.Response getDefaultInstanceForType() {
+ return org.hbase.protobuf.generated.AdminProtocol.Response.getDefaultInstance();
+ }
+
+ public org.hbase.protobuf.generated.AdminProtocol.Response build() {
+ org.hbase.protobuf.generated.AdminProtocol.Response result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.hbase.protobuf.generated.AdminProtocol.Response buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.hbase.protobuf.generated.AdminProtocol.Response result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.hbase.protobuf.generated.AdminProtocol.Response buildPartial() {
+ org.hbase.protobuf.generated.AdminProtocol.Response result = new org.hbase.protobuf.generated.AdminProtocol.Response(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.requestId_ = requestId_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.hbase.protobuf.generated.AdminProtocol.Response) {
+ return mergeFrom((org.hbase.protobuf.generated.AdminProtocol.Response)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.hbase.protobuf.generated.AdminProtocol.Response other) {
+ if (other == org.hbase.protobuf.generated.AdminProtocol.Response.getDefaultInstance()) return this;
+ if (other.hasRequestId()) {
+ setRequestId(other.getRequestId());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRequestId()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ requestId_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required uint64 requestId = 1;
+ private long requestId_ ;
+ public boolean hasRequestId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public long getRequestId() {
+ return requestId_;
+ }
+ public Builder setRequestId(long value) {
+ bitField0_ |= 0x00000001;
+ requestId_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearRequestId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ requestId_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:Response)
+ }
+
+ static {
+ defaultInstance = new Response(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:Response)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_Request_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_Request_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_Response_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_Response_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\023AdminProtocol.proto\032\014client.proto\"\034\n\007R" +
+ "equest\022\021\n\trequestId\030\001 \002(\004\"\035\n\010Response\022\021\n" +
+ "\trequestId\030\001 \002(\004B2\n\034org.hbase.protobuf.g" +
+ "eneratedB\rAdminProtocolH\001\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_Request_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_Request_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_Request_descriptor,
+ new java.lang.String[] { "RequestId", },
+ org.hbase.protobuf.generated.AdminProtocol.Request.class,
+ org.hbase.protobuf.generated.AdminProtocol.Request.Builder.class);
+ internal_static_Response_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_Response_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_Response_descriptor,
+ new java.lang.String[] { "RequestId", },
+ org.hbase.protobuf.generated.AdminProtocol.Response.class,
+ org.hbase.protobuf.generated.AdminProtocol.Response.Builder.class);
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git hbase-netty/src/main/java/org/hbase/protobuf/generated/DataProtocol.java hbase-netty/src/main/java/org/hbase/protobuf/generated/DataProtocol.java
new file mode 100644
index 0000000..6527599
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/protobuf/generated/DataProtocol.java
@@ -0,0 +1,1678 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: DataProtocol.proto
+
+package org.hbase.protobuf.generated;
+
+public final class DataProtocol {
+
+ private DataProtocol() {
+ }
+
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+
+ public interface RequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required uint64 requestId = 1;
+ boolean hasRequestId();
+
+ long getRequestId();
+
+ // optional .MultiRequest multiRequest = 2;
+ boolean hasMultiRequest();
+
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getMultiRequest();
+
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder getMultiRequestOrBuilder();
+
+ // optional .ScanRequest scanRequest = 3;
+ boolean hasScanRequest();
+
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getScanRequest();
+
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder getScanRequestOrBuilder();
+ }
+
+ public static final class Request extends
+ com.google.protobuf.GeneratedMessage
+ implements RequestOrBuilder {
+
+ // Use Request.newBuilder() to construct.
+ private Request(Builder builder) {
+ super(builder);
+ }
+
+ private Request(boolean noInit) {
+ }
+
+ private static final Request defaultInstance;
+
+ public static Request getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public Request getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.hbase.protobuf.generated.DataProtocol.internal_static_Request_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.hbase.protobuf.generated.DataProtocol.internal_static_Request_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required uint64 requestId = 1;
+ public static final int REQUESTID_FIELD_NUMBER = 1;
+ private long requestId_;
+
+ public boolean hasRequestId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+
+ public long getRequestId() {
+ return requestId_;
+ }
+
+ // optional .MultiRequest multiRequest = 2;
+ public static final int MULTIREQUEST_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest multiRequest_;
+
+ public boolean hasMultiRequest() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getMultiRequest() {
+ return multiRequest_;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder getMultiRequestOrBuilder() {
+ return multiRequest_;
+ }
+
+ // optional .ScanRequest scanRequest = 3;
+ public static final int SCANREQUEST_FIELD_NUMBER = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest scanRequest_;
+
+ public boolean hasScanRequest() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getScanRequest() {
+ return scanRequest_;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder getScanRequestOrBuilder() {
+ return scanRequest_;
+ }
+
+ private void initFields() {
+ requestId_ = 0L;
+ multiRequest_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
+ scanRequest_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
+ }
+
+ private byte memoizedIsInitialized = -1;
+
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRequestId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (hasMultiRequest()) {
+ if (!getMultiRequest().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ if (hasScanRequest()) {
+ if (!getScanRequest().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, requestId_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, multiRequest_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeMessage(3, scanRequest_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, requestId_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, multiRequest_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, scanRequest_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.hbase.protobuf.generated.DataProtocol.Request)) {
+ return super.equals(obj);
+ }
+ org.hbase.protobuf.generated.DataProtocol.Request other =
+ (org.hbase.protobuf.generated.DataProtocol.Request) obj;
+
+ boolean result = true;
+ result = result && (hasRequestId() == other.hasRequestId());
+ if (hasRequestId()) {
+ result = result && (getRequestId()
+ == other.getRequestId());
+ }
+ result = result && (hasMultiRequest() == other.hasMultiRequest());
+ if (hasMultiRequest()) {
+ result = result && getMultiRequest()
+ .equals(other.getMultiRequest());
+ }
+ result = result && (hasScanRequest() == other.hasScanRequest());
+ if (hasScanRequest()) {
+ result = result && getScanRequest()
+ .equals(other.getScanRequest());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRequestId()) {
+ hash = (37 * hash) + REQUESTID_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getRequestId());
+ }
+ if (hasMultiRequest()) {
+ hash = (37 * hash) + MULTIREQUEST_FIELD_NUMBER;
+ hash = (53 * hash) + getMultiRequest().hashCode();
+ }
+ if (hasScanRequest()) {
+ hash = (37 * hash) + SCANREQUEST_FIELD_NUMBER;
+ hash = (53 * hash) + getScanRequest().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Request parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Request parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Request parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Request parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Request parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Request parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Request parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Request parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Request parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Request parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() {
+ return Builder.create();
+ }
+
+ public Builder newBuilderForType() {
+ return newBuilder();
+ }
+
+ public static Builder newBuilder(org.hbase.protobuf.generated.DataProtocol.Request prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+
+ public Builder toBuilder() {
+ return newBuilder(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+
+ public static final class Builder extends
+      com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.hbase.protobuf.generated.DataProtocol.RequestOrBuilder {
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.hbase.protobuf.generated.DataProtocol.internal_static_Request_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.hbase.protobuf.generated.DataProtocol.internal_static_Request_fieldAccessorTable;
+ }
+
+ // Construct using org.hbase.protobuf.generated.DataProtocol.Request.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getMultiRequestFieldBuilder();
+ getScanRequestFieldBuilder();
+ }
+ }
+
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ requestId_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (multiRequestBuilder_ == null) {
+ multiRequest_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest
+ .getDefaultInstance();
+ } else {
+ multiRequestBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ if (scanRequestBuilder_ == null) {
+ scanRequest_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest
+ .getDefaultInstance();
+ } else {
+ scanRequestBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.hbase.protobuf.generated.DataProtocol.Request.getDescriptor();
+ }
+
+ public org.hbase.protobuf.generated.DataProtocol.Request getDefaultInstanceForType() {
+ return org.hbase.protobuf.generated.DataProtocol.Request.getDefaultInstance();
+ }
+
+ public org.hbase.protobuf.generated.DataProtocol.Request build() {
+ org.hbase.protobuf.generated.DataProtocol.Request result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.hbase.protobuf.generated.DataProtocol.Request buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.hbase.protobuf.generated.DataProtocol.Request result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.hbase.protobuf.generated.DataProtocol.Request buildPartial() {
+ org.hbase.protobuf.generated.DataProtocol.Request result =
+ new org.hbase.protobuf.generated.DataProtocol.Request(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.requestId_ = requestId_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ if (multiRequestBuilder_ == null) {
+ result.multiRequest_ = multiRequest_;
+ } else {
+ result.multiRequest_ = multiRequestBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ if (scanRequestBuilder_ == null) {
+ result.scanRequest_ = scanRequest_;
+ } else {
+ result.scanRequest_ = scanRequestBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.hbase.protobuf.generated.DataProtocol.Request) {
+ return mergeFrom((org.hbase.protobuf.generated.DataProtocol.Request) other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.hbase.protobuf.generated.DataProtocol.Request other) {
+ if (other == org.hbase.protobuf.generated.DataProtocol.Request.getDefaultInstance()) {
+ return this;
+ }
+ if (other.hasRequestId()) {
+ setRequestId(other.getRequestId());
+ }
+ if (other.hasMultiRequest()) {
+ mergeMultiRequest(other.getMultiRequest());
+ }
+ if (other.hasScanRequest()) {
+ mergeScanRequest(other.getScanRequest());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRequestId()) {
+
+ return false;
+ }
+ if (hasMultiRequest()) {
+ if (!getMultiRequest().isInitialized()) {
+
+ return false;
+ }
+ }
+ if (hasScanRequest()) {
+ if (!getScanRequest().isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ requestId_ = input.readUInt64();
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder
+ subBuilder =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder();
+ if (hasMultiRequest()) {
+ subBuilder.mergeFrom(getMultiRequest());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setMultiRequest(subBuilder.buildPartial());
+ break;
+ }
+ case 26: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder
+ subBuilder =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder();
+ if (hasScanRequest()) {
+ subBuilder.mergeFrom(getScanRequest());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setScanRequest(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required uint64 requestId = 1;
+ private long requestId_;
+
+ public boolean hasRequestId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+
+ public long getRequestId() {
+ return requestId_;
+ }
+
+ public Builder setRequestId(long value) {
+ bitField0_ |= 0x00000001;
+ requestId_ = value;
+ onChanged();
+ return this;
+ }
+
+ public Builder clearRequestId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ requestId_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional .MultiRequest multiRequest = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest multiRequest_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder>
+ multiRequestBuilder_;
+
+ public boolean hasMultiRequest() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getMultiRequest() {
+ if (multiRequestBuilder_ == null) {
+ return multiRequest_;
+ } else {
+ return multiRequestBuilder_.getMessage();
+ }
+ }
+
+ public Builder setMultiRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest value) {
+ if (multiRequestBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ multiRequest_ = value;
+ onChanged();
+ } else {
+ multiRequestBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+
+ public Builder setMultiRequest(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder builderForValue) {
+ if (multiRequestBuilder_ == null) {
+ multiRequest_ = builderForValue.build();
+ onChanged();
+ } else {
+ multiRequestBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+
+ public Builder mergeMultiRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest value) {
+ if (multiRequestBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ multiRequest_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest
+ .getDefaultInstance()) {
+ multiRequest_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest
+ .newBuilder(multiRequest_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ multiRequest_ = value;
+ }
+ onChanged();
+ } else {
+ multiRequestBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+
+ public Builder clearMultiRequest() {
+ if (multiRequestBuilder_ == null) {
+ multiRequest_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest
+ .getDefaultInstance();
+ onChanged();
+ } else {
+ multiRequestBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder getMultiRequestBuilder() {
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return getMultiRequestFieldBuilder().getBuilder();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder getMultiRequestOrBuilder() {
+ if (multiRequestBuilder_ != null) {
+ return multiRequestBuilder_.getMessageOrBuilder();
+ } else {
+ return multiRequest_;
+ }
+ }
+
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder>
+ getMultiRequestFieldBuilder() {
+ if (multiRequestBuilder_ == null) {
+ multiRequestBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder>(
+ multiRequest_,
+ getParentForChildren(),
+ isClean());
+ multiRequest_ = null;
+ }
+ return multiRequestBuilder_;
+ }
+
+ // optional .ScanRequest scanRequest = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest scanRequest_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder>
+ scanRequestBuilder_;
+
+ public boolean hasScanRequest() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getScanRequest() {
+ if (scanRequestBuilder_ == null) {
+ return scanRequest_;
+ } else {
+ return scanRequestBuilder_.getMessage();
+ }
+ }
+
+ public Builder setScanRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest value) {
+ if (scanRequestBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ scanRequest_ = value;
+ onChanged();
+ } else {
+ scanRequestBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+
+ public Builder setScanRequest(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder builderForValue) {
+ if (scanRequestBuilder_ == null) {
+ scanRequest_ = builderForValue.build();
+ onChanged();
+ } else {
+ scanRequestBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+
+ public Builder mergeScanRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest value) {
+ if (scanRequestBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004) &&
+ scanRequest_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest
+ .getDefaultInstance()) {
+ scanRequest_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest
+ .newBuilder(scanRequest_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ scanRequest_ = value;
+ }
+ onChanged();
+ } else {
+ scanRequestBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+
+ public Builder clearScanRequest() {
+ if (scanRequestBuilder_ == null) {
+ scanRequest_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest
+ .getDefaultInstance();
+ onChanged();
+ } else {
+ scanRequestBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder getScanRequestBuilder() {
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return getScanRequestFieldBuilder().getBuilder();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder getScanRequestOrBuilder() {
+ if (scanRequestBuilder_ != null) {
+ return scanRequestBuilder_.getMessageOrBuilder();
+ } else {
+ return scanRequest_;
+ }
+ }
+
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder>
+ getScanRequestFieldBuilder() {
+ if (scanRequestBuilder_ == null) {
+ scanRequestBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder>(
+ scanRequest_,
+ getParentForChildren(),
+ isClean());
+ scanRequest_ = null;
+ }
+ return scanRequestBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:Request)
+ }
+
+ static {
+ defaultInstance = new Request(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:Request)
+ }
+
+ public interface ResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required uint64 requestId = 1;
+ boolean hasRequestId();
+
+ long getRequestId();
+
+ // optional .MultiResponse multiResponse = 2;
+ boolean hasMultiResponse();
+
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getMultiResponse();
+
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder getMultiResponseOrBuilder();
+
+ // optional .ScanRequest scanRequest = 3;
+ boolean hasScanRequest();
+
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getScanRequest();
+
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder getScanRequestOrBuilder();
+ }
+
+ public static final class Response extends
+ com.google.protobuf.GeneratedMessage
+ implements ResponseOrBuilder {
+
+ // Use Response.newBuilder() to construct.
+ private Response(Builder builder) {
+ super(builder);
+ }
+
+ private Response(boolean noInit) {
+ }
+
+ private static final Response defaultInstance;
+
+ public static Response getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public Response getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.hbase.protobuf.generated.DataProtocol.internal_static_Response_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.hbase.protobuf.generated.DataProtocol.internal_static_Response_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required uint64 requestId = 1;
+ public static final int REQUESTID_FIELD_NUMBER = 1;
+ private long requestId_;
+
+ public boolean hasRequestId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+
+ public long getRequestId() {
+ return requestId_;
+ }
+
+ // optional .MultiResponse multiResponse = 2;
+ public static final int MULTIRESPONSE_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multiResponse_;
+
+ public boolean hasMultiResponse() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getMultiResponse() {
+ return multiResponse_;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder getMultiResponseOrBuilder() {
+ return multiResponse_;
+ }
+
+ // optional .ScanRequest scanRequest = 3;
+ public static final int SCANREQUEST_FIELD_NUMBER = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest scanRequest_;
+
+ public boolean hasScanRequest() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getScanRequest() {
+ return scanRequest_;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder getScanRequestOrBuilder() {
+ return scanRequest_;
+ }
+
+ private void initFields() {
+ requestId_ = 0L;
+ multiResponse_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse
+ .getDefaultInstance();
+ scanRequest_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
+ }
+
+ private byte memoizedIsInitialized = -1;
+
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRequestId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (hasMultiResponse()) {
+ if (!getMultiResponse().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ if (hasScanRequest()) {
+ if (!getScanRequest().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, requestId_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, multiResponse_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeMessage(3, scanRequest_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, requestId_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, multiResponse_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, scanRequest_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.hbase.protobuf.generated.DataProtocol.Response)) {
+ return super.equals(obj);
+ }
+ org.hbase.protobuf.generated.DataProtocol.Response other =
+ (org.hbase.protobuf.generated.DataProtocol.Response) obj;
+
+ boolean result = true;
+ result = result && (hasRequestId() == other.hasRequestId());
+ if (hasRequestId()) {
+ result = result && (getRequestId()
+ == other.getRequestId());
+ }
+ result = result && (hasMultiResponse() == other.hasMultiResponse());
+ if (hasMultiResponse()) {
+ result = result && getMultiResponse()
+ .equals(other.getMultiResponse());
+ }
+ result = result && (hasScanRequest() == other.hasScanRequest());
+ if (hasScanRequest()) {
+ result = result && getScanRequest()
+ .equals(other.getScanRequest());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRequestId()) {
+ hash = (37 * hash) + REQUESTID_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getRequestId());
+ }
+ if (hasMultiResponse()) {
+ hash = (37 * hash) + MULTIRESPONSE_FIELD_NUMBER;
+ hash = (53 * hash) + getMultiResponse().hashCode();
+ }
+ if (hasScanRequest()) {
+ hash = (37 * hash) + SCANREQUEST_FIELD_NUMBER;
+ hash = (53 * hash) + getScanRequest().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Response parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Response parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Response parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Response parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Response parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Response parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Response parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Response parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Response parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+
+ public static org.hbase.protobuf.generated.DataProtocol.Response parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() {
+ return Builder.create();
+ }
+
+ public Builder newBuilderForType() {
+ return newBuilder();
+ }
+
+ public static Builder newBuilder(org.hbase.protobuf.generated.DataProtocol.Response prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+
+ public Builder toBuilder() {
+ return newBuilder(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.hbase.protobuf.generated.DataProtocol.ResponseOrBuilder {
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.hbase.protobuf.generated.DataProtocol.internal_static_Response_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.hbase.protobuf.generated.DataProtocol.internal_static_Response_fieldAccessorTable;
+ }
+
+ // Construct using org.hbase.protobuf.generated.DataProtocol.Response.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getMultiResponseFieldBuilder();
+ getScanRequestFieldBuilder();
+ }
+ }
+
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ requestId_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (multiResponseBuilder_ == null) {
+ multiResponse_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse
+ .getDefaultInstance();
+ } else {
+ multiResponseBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ if (scanRequestBuilder_ == null) {
+ scanRequest_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest
+ .getDefaultInstance();
+ } else {
+ scanRequestBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.hbase.protobuf.generated.DataProtocol.Response.getDescriptor();
+ }
+
+ public org.hbase.protobuf.generated.DataProtocol.Response getDefaultInstanceForType() {
+ return org.hbase.protobuf.generated.DataProtocol.Response.getDefaultInstance();
+ }
+
+ public org.hbase.protobuf.generated.DataProtocol.Response build() {
+ org.hbase.protobuf.generated.DataProtocol.Response result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.hbase.protobuf.generated.DataProtocol.Response buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.hbase.protobuf.generated.DataProtocol.Response result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.hbase.protobuf.generated.DataProtocol.Response buildPartial() {
+ org.hbase.protobuf.generated.DataProtocol.Response result =
+ new org.hbase.protobuf.generated.DataProtocol.Response(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.requestId_ = requestId_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ if (multiResponseBuilder_ == null) {
+ result.multiResponse_ = multiResponse_;
+ } else {
+ result.multiResponse_ = multiResponseBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ if (scanRequestBuilder_ == null) {
+ result.scanRequest_ = scanRequest_;
+ } else {
+ result.scanRequest_ = scanRequestBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.hbase.protobuf.generated.DataProtocol.Response) {
+ return mergeFrom((org.hbase.protobuf.generated.DataProtocol.Response) other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.hbase.protobuf.generated.DataProtocol.Response other) {
+ if (other == org.hbase.protobuf.generated.DataProtocol.Response.getDefaultInstance()) {
+ return this;
+ }
+ if (other.hasRequestId()) {
+ setRequestId(other.getRequestId());
+ }
+ if (other.hasMultiResponse()) {
+ mergeMultiResponse(other.getMultiResponse());
+ }
+ if (other.hasScanRequest()) {
+ mergeScanRequest(other.getScanRequest());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRequestId()) {
+
+ return false;
+ }
+ if (hasMultiResponse()) {
+ if (!getMultiResponse().isInitialized()) {
+
+ return false;
+ }
+ }
+ if (hasScanRequest()) {
+ if (!getScanRequest().isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ requestId_ = input.readUInt64();
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder
+ subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse
+ .newBuilder();
+ if (hasMultiResponse()) {
+ subBuilder.mergeFrom(getMultiResponse());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setMultiResponse(subBuilder.buildPartial());
+ break;
+ }
+ case 26: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder
+ subBuilder =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder();
+ if (hasScanRequest()) {
+ subBuilder.mergeFrom(getScanRequest());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setScanRequest(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required uint64 requestId = 1;
+ private long requestId_;
+
+ public boolean hasRequestId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+
+ public long getRequestId() {
+ return requestId_;
+ }
+
+ public Builder setRequestId(long value) {
+ bitField0_ |= 0x00000001;
+ requestId_ = value;
+ onChanged();
+ return this;
+ }
+
+ public Builder clearRequestId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ requestId_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional .MultiResponse multiResponse = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multiResponse_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse
+ .getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder>
+ multiResponseBuilder_;
+
+ public boolean hasMultiResponse() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getMultiResponse() {
+ if (multiResponseBuilder_ == null) {
+ return multiResponse_;
+ } else {
+ return multiResponseBuilder_.getMessage();
+ }
+ }
+
+ public Builder setMultiResponse(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse value) {
+ if (multiResponseBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ multiResponse_ = value;
+ onChanged();
+ } else {
+ multiResponseBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+
+ public Builder setMultiResponse(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder builderForValue) {
+ if (multiResponseBuilder_ == null) {
+ multiResponse_ = builderForValue.build();
+ onChanged();
+ } else {
+ multiResponseBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+
+ public Builder mergeMultiResponse(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse value) {
+ if (multiResponseBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ multiResponse_ !=
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse
+ .getDefaultInstance()) {
+ multiResponse_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse
+ .newBuilder(multiResponse_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ multiResponse_ = value;
+ }
+ onChanged();
+ } else {
+ multiResponseBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+
+ public Builder clearMultiResponse() {
+ if (multiResponseBuilder_ == null) {
+ multiResponse_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse
+ .getDefaultInstance();
+ onChanged();
+ } else {
+ multiResponseBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder getMultiResponseBuilder() {
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return getMultiResponseFieldBuilder().getBuilder();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder getMultiResponseOrBuilder() {
+ if (multiResponseBuilder_ != null) {
+ return multiResponseBuilder_.getMessageOrBuilder();
+ } else {
+ return multiResponse_;
+ }
+ }
+
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder>
+ getMultiResponseFieldBuilder() {
+ if (multiResponseBuilder_ == null) {
+ multiResponseBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder>(
+ multiResponse_,
+ getParentForChildren(),
+ isClean());
+ multiResponse_ = null;
+ }
+ return multiResponseBuilder_;
+ }
+
+ // optional .ScanRequest scanRequest = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest scanRequest_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder>
+ scanRequestBuilder_;
+
+ public boolean hasScanRequest() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getScanRequest() {
+ if (scanRequestBuilder_ == null) {
+ return scanRequest_;
+ } else {
+ return scanRequestBuilder_.getMessage();
+ }
+ }
+
+ public Builder setScanRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest value) {
+ if (scanRequestBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ scanRequest_ = value;
+ onChanged();
+ } else {
+ scanRequestBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+
+ public Builder setScanRequest(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder builderForValue) {
+ if (scanRequestBuilder_ == null) {
+ scanRequest_ = builderForValue.build();
+ onChanged();
+ } else {
+ scanRequestBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+
+ public Builder mergeScanRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest value) {
+ if (scanRequestBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004) &&
+ scanRequest_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest
+ .getDefaultInstance()) {
+ scanRequest_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest
+ .newBuilder(scanRequest_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ scanRequest_ = value;
+ }
+ onChanged();
+ } else {
+ scanRequestBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+
+ public Builder clearScanRequest() {
+ if (scanRequestBuilder_ == null) {
+ scanRequest_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest
+ .getDefaultInstance();
+ onChanged();
+ } else {
+ scanRequestBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder getScanRequestBuilder() {
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return getScanRequestFieldBuilder().getBuilder();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder getScanRequestOrBuilder() {
+ if (scanRequestBuilder_ != null) {
+ return scanRequestBuilder_.getMessageOrBuilder();
+ } else {
+ return scanRequest_;
+ }
+ }
+
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder>
+ getScanRequestFieldBuilder() {
+ if (scanRequestBuilder_ == null) {
+ scanRequestBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder>(
+ scanRequest_,
+ getParentForChildren(),
+ isClean());
+ scanRequest_ = null;
+ }
+ return scanRequestBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:Response)
+ }
+
+ static {
+ defaultInstance = new Response(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:Response)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_Request_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_Request_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_Response_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_Response_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\022DataProtocol.proto\032\014client.proto\"d\n\007Re" +
+ "quest\022\021\n\trequestId\030\001 \002(\004\022#\n\014multiRequest" +
+ "\030\002 \001(\0132\r.MultiRequest\022!\n\013scanRequest\030\003 \001" +
+ "(\0132\014.ScanRequest\"g\n\010Response\022\021\n\trequestI" +
+ "d\030\001 \002(\004\022%\n\rmultiResponse\030\002 \001(\0132\016.MultiRe" +
+ "sponse\022!\n\013scanRequest\030\003 \001(\0132\014.ScanReques" +
+ "tB1\n\034org.hbase.protobuf.generatedB\014DataP" +
+ "rotocolH\001\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_Request_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_Request_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_Request_descriptor,
+ new java.lang.String[]{"RequestId", "MultiRequest", "ScanRequest",},
+ org.hbase.protobuf.generated.DataProtocol.Request.class,
+ org.hbase.protobuf.generated.DataProtocol.Request.Builder.class);
+ internal_static_Response_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_Response_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_Response_descriptor,
+ new java.lang.String[]{"RequestId", "MultiResponse", "ScanRequest",},
+ org.hbase.protobuf.generated.DataProtocol.Response.class,
+ org.hbase.protobuf.generated.DataProtocol.Response.Builder.class);
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[]{
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos
+ .getDescriptor(),
+ }, assigner);
+ }
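+
+  /*
+   * For reference: the message definitions in this generated file correspond
+   * to the following .proto, reconstructed from the descriptor data and field
+   * comments above (the DataProtocol.proto source is authoritative):
+   *
+   *   import "client.proto";
+   *
+   *   message Request {
+   *     required uint64 requestId = 1;
+   *     optional MultiRequest multiRequest = 2;
+   *     optional ScanRequest scanRequest = 3;
+   *   }
+   *
+   *   message Response {
+   *     required uint64 requestId = 1;
+   *     optional MultiResponse multiResponse = 2;
+   *     optional ScanRequest scanRequest = 3;
+   *   }
+   */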
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git hbase-netty/src/main/java/org/hbase/server/HBaseModule.java hbase-netty/src/main/java/org/hbase/server/HBaseModule.java
new file mode 100644
index 0000000..5f9b994
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/HBaseModule.java
@@ -0,0 +1,56 @@
+package org.hbase.server;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Provides;
+import com.google.inject.Singleton;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.catalog.CatalogTracker;
+import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+
+import java.io.IOException;
+
+/**
+ * Guice module providing the dependencies shared by the Netty HBase servers:
+ * the HBase Configuration, a ZooKeeperWatcher and a CatalogTracker.
+ */
+public class HBaseModule extends AbstractModule {
+
+ @Provides
+ @Singleton
+ public Configuration getHBaseConfiguration() {
+ return HBaseConfiguration.create();
+ }
+
+ @Provides
+ @Singleton
+ public CatalogTracker getCatalogTracker(ZooKeeperWatcher watcher, Configuration conf) {
+ CatalogTracker ct = null;
+ try {
+ ct = new CatalogTracker(watcher, conf,
+ null, conf.getInt("hbase.regionserver.catalog.timeout", Integer.MAX_VALUE));
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return ct;
+ }
+
+ @Provides
+ @Singleton
+ public ZooKeeperWatcher getZooKeeperWatcher(Configuration conf) {
+ ZooKeeperWatcher zw = null;
+ try {
+ int port = conf.getInt(HConstants.REGIONSERVER_PORT,
+ HConstants.DEFAULT_REGIONSERVER_PORT);
+ zw = new ZooKeeperWatcher(conf, "regionserver:" + port, null);
+
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return zw;
+ }
+
+ @Override
+ protected void configure() {
+ }
+}
diff --git hbase-netty/src/main/java/org/hbase/server/master/HBaseMasterServerModule.java hbase-netty/src/main/java/org/hbase/server/master/HBaseMasterServerModule.java
new file mode 100644
index 0000000..c1767f3
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/master/HBaseMasterServerModule.java
@@ -0,0 +1,10 @@
+package org.hbase.server.master;
+
+import org.hbase.server.HBaseModule;
+
+/**
+ * Guice module for the Netty master server; it currently adds nothing beyond
+ * the shared bindings in HBaseModule.
+ */
+public class HBaseMasterServerModule extends HBaseModule {
+
+}
diff --git hbase-netty/src/main/java/org/hbase/server/master/NMasterServer.java hbase-netty/src/main/java/org/hbase/server/master/NMasterServer.java
new file mode 100644
index 0000000..ccffa70
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/master/NMasterServer.java
@@ -0,0 +1,84 @@
+package org.hbase.server.master;
+
+import com.google.inject.Inject;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.catalog.CatalogTracker;
+import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+
+import java.net.UnknownHostException;
+
+/**
+ * Netty-based HBase master server. Its ZooKeeper and catalog tracker
+ * dependencies are supplied through Guice injection.
+ */
+public class NMasterServer implements Runnable, Server {
+
+ private Configuration conf;
+  private ZooKeeperWatcher zookeeper;
+ private CatalogTracker catalogTracker;
+ private boolean aborted = false;
+ private boolean stopped = true;
+
+ @Inject
+  public NMasterServer(Configuration configuration,
+                       ZooKeeperWatcher zookeeper,
+                       CatalogTracker catalogTracker) throws UnknownHostException {
+    this.conf = configuration;
+    this.zookeeper = zookeeper;
+    this.catalogTracker = catalogTracker;
+
+    this.zookeeper.setAbortable(this);
+ this.catalogTracker.setAbortable(this);
+ }
+
+ @Override
+ public void run() {
+ this.stopped = false;
+
+ }
+
+ public static void main(String args[]) throws Exception {
+ new NMasterServerCommandLine().doMain(args);
+ }
+
+ @Override
+ public Configuration getConfiguration() {
+ return this.conf;
+ }
+
+ @Override
+ public ZooKeeperWatcher getZooKeeper() {
+    return this.zookeeper;
+ }
+
+ @Override
+ public CatalogTracker getCatalogTracker() {
+ return this.catalogTracker;
+ }
+
+ @Override
+ public ServerName getServerName() {
+ return null;
+ }
+
+ @Override
+ public void abort(String why, Throwable e) {
+ this.aborted = true;
+ }
+
+ @Override
+ public boolean isAborted() {
+    return aborted;
+ }
+
+ @Override
+ public void stop(String why) {
+ this.stopped = true;
+ }
+
+ @Override
+ public boolean isStopped() {
+    return this.stopped;
+ }
+}
diff --git hbase-netty/src/main/java/org/hbase/server/master/NMasterServerCommandLine.java hbase-netty/src/main/java/org/hbase/server/master/NMasterServerCommandLine.java
new file mode 100644
index 0000000..c8a7caf
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/master/NMasterServerCommandLine.java
@@ -0,0 +1,30 @@
+package org.hbase.server.master;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.util.ServerCommandLine;
+
+/**
+ * Command line entry point that creates a Guice injector and starts an
+ * NMasterServer.
+ */
+public class NMasterServerCommandLine extends ServerCommandLine {
+
+ private static final String USAGE = "Usage: NMasterServer [-D conf.param=value] start";
+ private static final Log LOG = LogFactory.getLog(NMasterServerCommandLine.class);
+ private Injector injector = Guice.createInjector(new HBaseMasterServerModule());
+
+ @Override
+ protected String getUsage() {
+ return USAGE;
+ }
+
+ @Override
+ public int run(String[] args) throws Exception {
+ NMasterServer server = injector.getInstance(NMasterServer.class);
+ server.run();
+ return 0;
+ }
+
+}
diff --git hbase-netty/src/main/java/org/hbase/server/regionserver/HBaseRegionServerModule.java hbase-netty/src/main/java/org/hbase/server/regionserver/HBaseRegionServerModule.java
new file mode 100644
index 0000000..88a8526
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/regionserver/HBaseRegionServerModule.java
@@ -0,0 +1,46 @@
+package org.hbase.server.regionserver;
+
+import com.google.inject.Provides;
+import com.google.inject.Singleton;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.zookeeper.ClusterStatusTracker;
+import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
+import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+import org.hbase.server.HBaseModule;
+import org.hbase.server.regionserver.data.DataServer;
+import org.hbase.server.regionserver.data.protobuf.ProtoDataServer;
+
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Guice module for the Netty region server, adding the master address and
+ * cluster status trackers plus the list of enabled data servers.
+ */
+public class HBaseRegionServerModule extends HBaseModule {
+
+ @Provides
+ @Singleton
+ public MasterAddressTracker getMasterAddressTracker(ZooKeeperWatcher watcher) {
+ return new MasterAddressTracker(watcher, null);
+ }
+
+ @Provides
+ @Singleton
+ public ClusterStatusTracker getClusterStatusTracker(ZooKeeperWatcher watcher) {
+ return new ClusterStatusTracker(watcher, null);
+ }
+
+ @Provides
+ @Singleton
+  public List<DataServer> getEnabledDataServers(Configuration conf) {
+    List<DataServer> enabledDs = new ArrayList<DataServer>();
+    try {
+      Collections.addAll(enabledDs, new ProtoDataServer(conf));
+    } catch (UnknownHostException e) {
+      e.printStackTrace();
+ }
+ return enabledDs;
+ }
+}
diff --git hbase-netty/src/main/java/org/hbase/server/regionserver/NRegionServer.java hbase-netty/src/main/java/org/hbase/server/regionserver/NRegionServer.java
new file mode 100644
index 0000000..8f9f396
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/regionserver/NRegionServer.java
@@ -0,0 +1,123 @@
+package org.hbase.server.regionserver;
+
+import com.google.inject.Inject;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.catalog.CatalogTracker;
+import org.apache.hadoop.hbase.zookeeper.ClusterStatusTracker;
+import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
+import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+import org.hbase.server.regionserver.admin.AdminServer;
+import org.hbase.server.regionserver.data.DataServer;
+import org.hbase.server.regionserver.data.protobuf.ProtoDataServer;
+
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Netty-based HBase region server. It starts an admin server and one or more
+ * data servers, and tracks master and cluster state through ZooKeeper.
+ */
+public class NRegionServer implements Runnable, Server {
+
+ private static final Log LOG = LogFactory.getLog(NRegionServer.class);
+ private Configuration conf;
+ private AdminServer adminServer;
+  private ZooKeeperWatcher zookeeper;
+  private MasterAddressTracker masterAddressTracker;
+  private ClusterStatusTracker clusterStatusTracker;
+  private CatalogTracker catalogTracker;
+  private List<DataServer> dataServerList;
+ private boolean aborted = false;
+ private boolean stopped = true;
+
+ @Inject
+ public NRegionServer(Configuration configuration,
+ ZooKeeperWatcher watcher,
+ MasterAddressTracker masterAddressTracker,
+ ClusterStatusTracker clusterStatusTracker,
+ CatalogTracker catalogTracker) throws UnknownHostException {
+
+ this.conf = configuration;
+    this.zookeeper = watcher;
+    this.masterAddressTracker = masterAddressTracker;
+    this.clusterStatusTracker = clusterStatusTracker;
+    this.catalogTracker = catalogTracker;
+
+    this.zookeeper.setAbortable(this);
+    this.masterAddressTracker.setAbortable(this);
+    this.clusterStatusTracker.setAbortable(this);
+    this.catalogTracker.setAbortable(this);
+
+    //Create all of the dependent servers.
+    this.adminServer = new AdminServer(getConfiguration());
+    this.dataServerList = new ArrayList<DataServer>();
+ Collections.addAll(dataServerList, new ProtoDataServer(getConfiguration()));
+
+ }
+
+ public void initZookeeper() {
+ }
+
+ @Override
+ public void run() {
+ stopped = false;
+ initZookeeper();
+ adminServer.run();
+ for (DataServer ds : this.dataServerList) {
+ ds.run();
+ }
+ }
+
+ @Override
+  public void abort(String why, Throwable e) {
+    this.aborted = true;
+    for (DataServer ds : this.dataServerList) {
+      ds.abort(why, e);
+    }
+  }
+
+ @Override
+ public boolean isAborted() {
+ return aborted;
+ }
+
+ @Override
+ public Configuration getConfiguration() {
+ return this.conf;
+ }
+
+ @Override
+ public ZooKeeperWatcher getZooKeeper() {
+    return this.zookeeper;
+ }
+
+ @Override
+ public CatalogTracker getCatalogTracker() {
+ return this.catalogTracker;
+ }
+
+ @Override
+ public ServerName getServerName() {
+ return null;
+ }
+
+ @Override
+ public void stop(String why) {
+ stopped = true;
+
+ }
+
+ @Override
+ public boolean isStopped() {
+ return stopped;
+ }
+
+ public static void main(String args[]) throws Exception {
+ new NRegionServerCommandLine().doMain(args);
+ }
+
+}
diff --git hbase-netty/src/main/java/org/hbase/server/regionserver/NRegionServerCommandLine.java hbase-netty/src/main/java/org/hbase/server/regionserver/NRegionServerCommandLine.java
new file mode 100644
index 0000000..223e428
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/regionserver/NRegionServerCommandLine.java
@@ -0,0 +1,30 @@
+package org.hbase.server.regionserver;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.util.ServerCommandLine;
+
+/**
+ * Command line entry point that creates a Guice injector and starts an
+ * NRegionServer.
+ */
+public class NRegionServerCommandLine extends ServerCommandLine {
+
+ private static final String USAGE = "Usage: NRegionServer [-D conf.param=value] start";
+ private static final Log LOG = LogFactory.getLog(NRegionServerCommandLine.class);
+
+ private Injector injector = Guice.createInjector(new HBaseRegionServerModule());
+
+ @Override
+ protected String getUsage() {
+ return USAGE;
+ }
+
+ @Override
+ public int run(String[] args) throws Exception {
+ NRegionServer server = injector.getInstance(NRegionServer.class);
+ server.run();
+ return 0;
+ }
+}
diff --git hbase-netty/src/main/java/org/hbase/server/regionserver/admin/AdminHandler.java hbase-netty/src/main/java/org/hbase/server/regionserver/admin/AdminHandler.java
new file mode 100644
index 0000000..be891b8
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/regionserver/admin/AdminHandler.java
@@ -0,0 +1,10 @@
+package org.hbase.server.regionserver.admin;
+
+import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+
+/**
+ * Netty upstream handler for admin requests; handling logic is not yet
+ * implemented.
+ */
+public class AdminHandler extends SimpleChannelUpstreamHandler {
+
+}
diff --git hbase-netty/src/main/java/org/hbase/server/regionserver/admin/AdminPipelineFactory.java hbase-netty/src/main/java/org/hbase/server/regionserver/admin/AdminPipelineFactory.java
new file mode 100644
index 0000000..8fbe375
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/regionserver/admin/AdminPipelineFactory.java
@@ -0,0 +1,64 @@
+package org.hbase.server.regionserver.admin;
+
+import com.google.inject.Inject;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.hbase.protobuf.generated.AdminProtocol;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.handler.codec.oneone.OneToOneDecoder;
+import org.jboss.netty.handler.codec.oneone.OneToOneEncoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufDecoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufEncoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;
+import org.jboss.netty.handler.execution.ExecutionHandler;
+import org.jboss.netty.handler.execution.OrderedMemoryAwareThreadPoolExecutor;
+
+import java.util.concurrent.TimeUnit;
+
+import static org.jboss.netty.channel.Channels.pipeline;
+
+/**
+ * Builds the Netty pipeline for the admin port: varint32-framed protobuf
+ * AdminProtocol.Request decoding and encoding, with blocking work handed to
+ * an ordered thread pool in front of the AdminHandler.
+ */
+public class AdminPipelineFactory extends Configured implements ChannelPipelineFactory {
+
+ /** The decoder that will take bytes and give out protocol buf messages */
+ private OneToOneDecoder protoDecoder =
+ new ProtobufDecoder(AdminProtocol.Request.getDefaultInstance());
+
+  /** The thread pool for blocking region reads/writes. */
+ private OrderedMemoryAwareThreadPoolExecutor eventExecutor =
+ new OrderedMemoryAwareThreadPoolExecutor(45, 1000000, 10000000, 100, TimeUnit.MILLISECONDS);
+
+  /** The execution wrapper around the above thread pool. */
+ private ExecutionHandler exHandler = new ExecutionHandler(eventExecutor);
+
+ /** The handler that will run on the above. */
+ private AdminHandler adminHandler = new AdminHandler();
+
+  /** Encodes a protobuf message back into bytes. */
+ private OneToOneEncoder protoEncoder = new ProtobufEncoder();
+
+ /** Prepend the bytes with a length. */
+ private OneToOneEncoder frameEncoder =
+ new ProtobufVarint32LengthFieldPrepender();
+
+ @Inject
+ public AdminPipelineFactory(Configuration configuration) {
+ super(configuration);
+ }
+
+ @Override
+ public ChannelPipeline getPipeline() throws Exception {
+ ChannelPipeline p = pipeline();
+ p.addLast("frameDecoder", new ProtobufVarint32FrameDecoder());
+ p.addLast("protobufDecoder", protoDecoder);
+ p.addLast("frameEncoder", frameEncoder);
+ p.addLast("protobufEncoder", protoEncoder);
+ p.addLast("threadPoolExecutor", exHandler);
+ p.addLast("hbaseAdminr", adminHandler);
+ return p;
+ }
+}
diff --git hbase-netty/src/main/java/org/hbase/server/regionserver/admin/AdminServer.java hbase-netty/src/main/java/org/hbase/server/regionserver/admin/AdminServer.java
new file mode 100644
index 0000000..7bed1ba
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/regionserver/admin/AdminServer.java
@@ -0,0 +1,67 @@
+package org.hbase.server.regionserver.admin;
+
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.util.Strings;
+import org.apache.hadoop.net.DNS;
+import org.jboss.netty.bootstrap.ServerBootstrap;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.concurrent.Executors;
+
+/**
+ * Netty server for region server admin traffic, bound one port above the
+ * region server data port.
+ */
+public class AdminServer extends Configured implements Runnable, Abortable {
+
+ private Injector injector = Guice.createInjector();
+
+ private static final Log LOG = LogFactory.getLog(AdminServer.class);
+ private InetSocketAddress adminIsa;
+ private ServerBootstrap adminBootstrap;
+ private boolean aborted = false;
+
+ @Inject
+ public AdminServer(Configuration configuration) throws UnknownHostException {
+ super(configuration);
+ int port = getConf().getInt(HConstants.REGIONSERVER_PORT,
+ HConstants.DEFAULT_REGIONSERVER_PORT);
+ String hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
+ getConf().get("hbase.regionserver.dns.interface", "default"),
+ getConf().get("hbase.regionserver.dns.nameserver", "default")));
+    // Bind admin traffic one port above the region server data port.
+    adminIsa = new InetSocketAddress(hostname, port + 1);
+ }
+
+ public void run() {
+ LOG.debug("Starting Netty RegionServer[Admin]");
+ adminBootstrap = new ServerBootstrap(
+ new NioServerSocketChannelFactory(
+ Executors.newCachedThreadPool(),
+ Executors.newCachedThreadPool()));
+ ChannelPipelineFactory adminPipelineFactory = injector.getInstance(AdminPipelineFactory.class);
+ adminBootstrap.setPipelineFactory(adminPipelineFactory);
+ adminBootstrap.bind(adminIsa);
+ LOG.debug("Done Starting Netty RegionServer[Admin]");
+ }
+
+ @Override
+ public void abort(String why, Throwable e) {
+ this.aborted = true;
+ adminBootstrap.releaseExternalResources();
+ }
+
+ @Override
+ public boolean isAborted() {
+ return aborted;
+ }
+}
diff --git hbase-netty/src/main/java/org/hbase/server/regionserver/data/DataServer.java hbase-netty/src/main/java/org/hbase/server/regionserver/data/DataServer.java
new file mode 100644
index 0000000..c81256c
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/regionserver/data/DataServer.java
@@ -0,0 +1,10 @@
+package org.hbase.server.regionserver.data;
+
+import org.apache.hadoop.hbase.Abortable;
+
+/**
+ * A runnable, abortable server that serves data (read/write) traffic for the
+ * region server.
+ */
+public interface DataServer extends Runnable, Abortable {
+
+}
diff --git hbase-netty/src/main/java/org/hbase/server/regionserver/data/protobuf/ProtoDataHandler.java hbase-netty/src/main/java/org/hbase/server/regionserver/data/protobuf/ProtoDataHandler.java
new file mode 100644
index 0000000..2d70f57
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/regionserver/data/protobuf/ProtoDataHandler.java
@@ -0,0 +1,28 @@
+package org.hbase.server.regionserver.data.protobuf;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.hbase.protobuf.generated.DataProtocol;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.MessageEvent;
+import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+
+/**
+ * Netty upstream handler that receives decoded DataProtocol.Request messages
+ * from the data pipeline.
+ */
+public class ProtoDataHandler extends SimpleChannelUpstreamHandler {
+
+ private static final Log LOG = LogFactory.getLog(ProtoDataHandler.class);
+
+ @Override
+ public void messageReceived(
+ ChannelHandlerContext ctx, MessageEvent e) {
+    Object o = e.getMessage();
+    if (o instanceof DataProtocol.Request) {
+      // The pipeline's ProtobufDecoder emits DataProtocol.Request messages.
+      DataProtocol.Request request = (DataProtocol.Request) o;
+      if (request.hasMultiRequest() || request.hasScanRequest()) {
+        // TODO: dispatch the wrapped MultiRequest/ScanRequest to the region server.
+      }
+    } else {
+      LOG.warn("Error parsing Protobuf from client");
+    }
+ }
+
+}
diff --git hbase-netty/src/main/java/org/hbase/server/regionserver/data/protobuf/ProtoDataPipelineFactory.java hbase-netty/src/main/java/org/hbase/server/regionserver/data/protobuf/ProtoDataPipelineFactory.java
new file mode 100644
index 0000000..64c2d0f
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/regionserver/data/protobuf/ProtoDataPipelineFactory.java
@@ -0,0 +1,65 @@
+package org.hbase.server.regionserver.data.protobuf;
+
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.hbase.protobuf.generated.DataProtocol;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.handler.codec.oneone.OneToOneDecoder;
+import org.jboss.netty.handler.codec.oneone.OneToOneEncoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufDecoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufEncoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+import org.jboss.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;
+import org.jboss.netty.handler.execution.ExecutionHandler;
+import org.jboss.netty.handler.execution.OrderedMemoryAwareThreadPoolExecutor;
+
+import java.util.concurrent.TimeUnit;
+
+import static org.jboss.netty.channel.Channels.pipeline;
+
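+/**
+ * Builds the Netty pipeline for the protobuf data port. Each inbound message
+ * is a varint32 length prefix followed by a serialized DataProtocol.Request;
+ * outbound protobuf messages are framed the same way. Blocking work is handed
+ * to an ordered thread pool before reaching the ProtoDataHandler.
+ */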
+public class ProtoDataPipelineFactory extends Configured implements ChannelPipelineFactory {
+
+ private Injector injector = Guice.createInjector();
+
+ /** The decoder that will take bytes and give out protocol buf messages */
+ private OneToOneDecoder protoDecoder =
+ new ProtobufDecoder(DataProtocol.Request.getDefaultInstance());
+
+  /** The thread pool for blocking region reads/writes. */
+ private OrderedMemoryAwareThreadPoolExecutor eventExecutor =
+ new OrderedMemoryAwareThreadPoolExecutor(45, 1000000, 10000000, 100, TimeUnit.MILLISECONDS);
+
+  /** The execution wrapper around the above thread pool. */
+ private ExecutionHandler exHandler = new ExecutionHandler(eventExecutor);
+
+ /** The handler that will run on the above. */
+ private ProtoDataHandler rsHandler = injector.getInstance(ProtoDataHandler.class);
+
+  /** Encodes a protobuf message back into bytes. */
+ private OneToOneEncoder protoEncoder = new ProtobufEncoder();
+
+ /** Prepend the bytes with a length. */
+ private OneToOneEncoder frameEncoder =
+ new ProtobufVarint32LengthFieldPrepender();
+
+ @Inject
+ public ProtoDataPipelineFactory(Configuration configuration) {
+ super(configuration);
+ }
+
+ @Override
+ public ChannelPipeline getPipeline() throws Exception {
+ ChannelPipeline p = pipeline();
+ p.addLast("frameDecoder", new ProtobufVarint32FrameDecoder());
+ p.addLast("protobufDecoder", protoDecoder);
+ p.addLast("frameEncoder", frameEncoder);
+ p.addLast("protobufEncoder", protoEncoder);
+ p.addLast("threadPoolExecutor", exHandler);
+ p.addLast("dataHandler", rsHandler);
+ return p;
+ }
+}
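
A client talking to this pipeline has to mirror the framing: every message on the wire is a varint32-length-prefixed protobuf, DataProtocol.Request inbound to the server and DataProtocol.Response back out. A sketch of the matching client-side pipeline, assuming the same generated classes; the factory name is hypothetical and not part of this patch.

import org.hbase.protobuf.generated.DataProtocol;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.handler.codec.protobuf.ProtobufDecoder;
import org.jboss.netty.handler.codec.protobuf.ProtobufEncoder;
import org.jboss.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
import org.jboss.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;

import static org.jboss.netty.channel.Channels.pipeline;

/** Hypothetical client-side mirror of ProtoDataPipelineFactory. */
public class ProtoDataClientPipelineFactory implements ChannelPipelineFactory {

  @Override
  public ChannelPipeline getPipeline() throws Exception {
    ChannelPipeline p = pipeline();
    // inbound: strip the varint32 length prefix, then decode the Response protobuf
    p.addLast("frameDecoder", new ProtobufVarint32FrameDecoder());
    p.addLast("protobufDecoder",
        new ProtobufDecoder(DataProtocol.Response.getDefaultInstance()));
    // outbound: serialize the Request protobuf and prepend its length
    p.addLast("frameEncoder", new ProtobufVarint32LengthFieldPrepender());
    p.addLast("protobufEncoder", new ProtobufEncoder());
    // a response handler that completes pending calls would be added here
    return p;
  }
}
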
diff --git hbase-netty/src/main/java/org/hbase/server/regionserver/data/protobuf/ProtoDataServer.java hbase-netty/src/main/java/org/hbase/server/regionserver/data/protobuf/ProtoDataServer.java
new file mode 100644
index 0000000..25b9b07
--- /dev/null
+++ hbase-netty/src/main/java/org/hbase/server/regionserver/data/protobuf/ProtoDataServer.java
@@ -0,0 +1,68 @@
+package org.hbase.server.regionserver.data.protobuf;
+
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.util.Strings;
+import org.apache.hadoop.net.DNS;
+import org.hbase.server.regionserver.data.DataServer;
+import org.jboss.netty.bootstrap.ServerBootstrap;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.concurrent.Executors;
+
+/**
+ * Netty-based data server that binds the protobuf data pipeline on the region server port.
+ */
+public class ProtoDataServer extends Configured implements DataServer {
+
+ private Injector injector = Guice.createInjector();
+
+ private static final Log LOG = LogFactory.getLog(ProtoDataServer.class);
+ private InetSocketAddress dataIsa;
+ private ServerBootstrap dataBootstrap;
+ private boolean aborted = false;
+
+ @Inject
+ public ProtoDataServer(Configuration configuration) throws UnknownHostException {
+ super(configuration);
+ int port = getConf().getInt(HConstants.REGIONSERVER_PORT,
+ HConstants.DEFAULT_REGIONSERVER_PORT);
+ String hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
+ getConf().get("hbase.regionserver.dns.interface", "default"),
+ getConf().get("hbase.regionserver.dns.nameserver", "default")));
+ dataIsa = new InetSocketAddress(hostname, port);
+ }
+
+ public void run() {
+ LOG.debug("Starting Netty RegionServer[Data]");
+ dataBootstrap = new ServerBootstrap(
+ new NioServerSocketChannelFactory(
+ Executors.newCachedThreadPool(),
+ Executors.newCachedThreadPool()));
+ ChannelPipelineFactory dataPipelineFactory =
+ injector.getInstance(ProtoDataPipelineFactory.class);
+ dataBootstrap.setPipelineFactory(dataPipelineFactory);
+ dataBootstrap.bind(dataIsa);
+ LOG.debug("Done starting Netty RegionServer[Data]");
+ }
+
+ @Override
+ public void abort(String why, Throwable e) {
+ this.aborted = true;
+ dataBootstrap.releaseExternalResources();
+ }
+
+ @Override
+ public boolean isAborted() {
+ return aborted;
+ }
+}
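
Wiring the two sketches together, a throwaway client can connect to the bound port and push one envelope through the server's decoder stack. A hedged sketch, assuming the client pipeline factory sketched above and the default region server port of this HBase line (60020); none of the names below are part of the patch.

import java.net.InetSocketAddress;
import java.util.concurrent.Executors;

import org.hbase.protobuf.generated.DataProtocol;
import org.jboss.netty.bootstrap.ClientBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;

/** Hypothetical smoke-test client for ProtoDataServer. */
public class ProtoDataClientSmokeTest {

  public static void main(String[] args) throws Exception {
    ClientBootstrap bootstrap = new ClientBootstrap(
        new NioClientSocketChannelFactory(
            Executors.newCachedThreadPool(),
            Executors.newCachedThreadPool()));
    bootstrap.setPipelineFactory(new ProtoDataClientPipelineFactory());

    // connect to the port ProtoDataServer bound from hbase.regionserver.port
    Channel channel = bootstrap
        .connect(new InetSocketAddress("localhost", 60020))
        .awaitUninterruptibly()
        .getChannel();

    // an almost empty envelope; a real request would also carry a MultiRequest
    DataProtocol.Request request = DataProtocol.Request.newBuilder()
        .setRequestId(1L)
        .build();
    channel.write(request).awaitUninterruptibly();

    channel.close().awaitUninterruptibly();
    bootstrap.releaseExternalResources();
  }
}
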
diff --git hbase-netty/src/main/protobuf/AdminProtocol.proto hbase-netty/src/main/protobuf/AdminProtocol.proto
new file mode 100644
index 0000000..20536f6
--- /dev/null
+++ hbase-netty/src/main/protobuf/AdminProtocol.proto
@@ -0,0 +1,15 @@
+option java_package = "org.hbase.protobuf.generated";
+option java_outer_classname = "AdminProtocol";
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "client.proto";
+
+
+message Request {
+ required uint64 requestId = 1;
+}
+
+message Response {
+ required uint64 requestId = 1;
+}
\ No newline at end of file
diff --git hbase-netty/src/main/protobuf/DataProtocol.proto hbase-netty/src/main/protobuf/DataProtocol.proto
new file mode 100644
index 0000000..ea42497
--- /dev/null
+++ hbase-netty/src/main/protobuf/DataProtocol.proto
@@ -0,0 +1,19 @@
+option java_package = "org.hbase.protobuf.generated";
+option java_outer_classname = "DataProtocol";
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "client.proto";
+
+
+message Request {
+ required uint64 requestId = 1;
+ optional MultiRequest multiRequest = 2;
+ repeated ScanRequest scanRequest = 3;
+}
+
+message Response {
+ required uint64 requestId = 1;
+ optional MultiResponse multiResponse = 2;
+  repeated ScanResponse scanResponse = 3;
+}
\ No newline at end of file
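
The envelope exists so that every wire message carries a requestId the client can match against the Response coming back, while the payload stays a stock ClientProtos message. A small sketch of wrapping an already-built MultiRequest, assuming the Java classes protoc generates from the file above; the helper below is illustrative only.

import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.hbase.protobuf.generated.DataProtocol;

/** Hypothetical helper for building DataProtocol envelopes. */
public final class Envelopes {

  /** Wraps an already-built MultiRequest in the DataProtocol wire envelope. */
  public static DataProtocol.Request envelope(long requestId,
      ClientProtos.MultiRequest multi) {
    return DataProtocol.Request.newBuilder()
        .setRequestId(requestId)  // required: lets the client correlate the Response
        .setMultiRequest(multi)   // optional payload; scanRequest entries are the other option
        .build();
  }

  private Envelopes() {
  }
}
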
diff --git hbase-netty/src/test/java/org/hbase/server/regionserver/data/RegionServerTestModule.java hbase-netty/src/test/java/org/hbase/server/regionserver/data/RegionServerTestModule.java
new file mode 100644
index 0000000..a659579
--- /dev/null
+++ hbase-netty/src/test/java/org/hbase/server/regionserver/data/RegionServerTestModule.java
@@ -0,0 +1,22 @@
+package org.hbase.server.regionserver.data;
+
+import com.google.inject.Binder;
+import com.google.inject.Module;
+import com.google.inject.Provides;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * Guice module that supplies a plain {@link Configuration} to the data server tests.
+ */
+public class RegionServerTestModule implements Module {
+
+ @Provides
+ public Configuration getHBaseConfiguration() {
+ return new Configuration();
+ }
+
+ @Override
+ public void configure(Binder binder) {
+    // nothing to bind explicitly; the Configuration comes from the @Provides method above
+ }
+}
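
The test module above hands out a bare Hadoop Configuration; a production module would presumably provide HBaseConfiguration.create() and point the DataServer interface at the protobuf implementation, and it would need to be passed to the Guice.createInjector() calls in the server classes for their Configuration injection to resolve. A sketch under those assumptions; the module name and bindings are not part of this patch.

import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Singleton;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.hbase.server.regionserver.data.DataServer;
import org.hbase.server.regionserver.data.protobuf.ProtoDataServer;

/** Hypothetical production counterpart of RegionServerTestModule. */
public class RegionServerModule extends AbstractModule {

  @Override
  protected void configure() {
    // route DataServer lookups to the Netty/protobuf implementation
    bind(DataServer.class).to(ProtoDataServer.class);
  }

  @Provides
  @Singleton
  public Configuration getHBaseConfiguration() {
    // picks up hbase-site.xml / hbase-default.xml from the classpath
    return HBaseConfiguration.create();
  }
}
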
diff --git hbase-netty/src/test/java/org/hbase/server/regionserver/data/proto/TestDataPipelineFactory.java hbase-netty/src/test/java/org/hbase/server/regionserver/data/proto/TestDataPipelineFactory.java
new file mode 100644
index 0000000..d6bffb7
--- /dev/null
+++ hbase-netty/src/test/java/org/hbase/server/regionserver/data/proto/TestDataPipelineFactory.java
@@ -0,0 +1,37 @@
+package org.hbase.server.regionserver.data.proto;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import org.hbase.server.regionserver.data.RegionServerTestModule;
+import org.hbase.server.regionserver.data.protobuf.ProtoDataPipelineFactory;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertThat;
+
+/**
+ * Verifies that the protobuf data pipeline is assembled with the expected handlers, in order.
+ */
+public class TestDataPipelineFactory {
+
+ private Injector injector = Guice.createInjector(new RegionServerTestModule());
+
+ @Test
+ public void testPipelineContains() throws Exception {
+ ChannelPipelineFactory pf = injector.getInstance(ProtoDataPipelineFactory.class);
+
+ ChannelPipeline p = pf.getPipeline();
+
+    List<String> names = p.getNames();
+
+    List<String> exp = Arrays.asList("frameDecoder", "protobufDecoder", "frameEncoder",
+      "protobufEncoder", "threadPoolExecutor", "dataHandler");
+ assertThat(names, is(exp));
+ }
+
+}
diff --git hbase-server/pom.xml hbase-server/pom.xml
index 4359d9c..2bda2d6 100644
--- hbase-server/pom.xml
+++ hbase-server/pom.xml
@@ -292,7 +292,6 @@
io.netty
netty
- 3.5.0.Final-SNAPSHOT
com.yammer.metrics
@@ -355,16 +354,37 @@
com.thoughtworks.paranamer
paranamer-ant
+
+ org.jboss.netty
+ netty
+
+
org.apache.avro
avro-ipc
${avro.version}
+
+
+ org.jboss.netty
+ netty
+
+
+
+
+ io.netty
+ netty
org.apache.zookeeper
zookeeper
+
+
+ org.jboss.netty
+ netty
+
+
org.apache.thrift
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java hbase-server/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java
index 8a383e4..4375143 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java
@@ -113,6 +113,8 @@ public class CatalogTracker {
private final MetaNodeTracker metaNodeTracker;
private final AtomicBoolean metaAvailable = new AtomicBoolean(false);
private boolean instantiatedzkw = false;
+
+
private Abortable abortable;
/*
@@ -700,4 +702,13 @@ public class CatalogTracker {
public HConnection getConnection() {
return this.connection;
}
+
+ public Abortable getAbortable() {
+ return abortable;
+ }
+
+ public void setAbortable(Abortable abortable) {
+ this.abortable = abortable;
+ }
+
}
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
index 56c9eba..e0daaca 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
@@ -527,6 +527,39 @@ public final class RequestConverter {
return builder.build();
}
+  public static MultiRequest buildMultiRequestUntyped(byte[] regionName, List<Action> actions)
+ throws IOException {
+ ClientProtos.MultiRequest.Builder mr = ClientProtos.MultiRequest.newBuilder();
+ mr.setRegion(RequestConverter
+ .buildRegionSpecifier(RegionSpecifier.RegionSpecifierType.REGION_NAME, regionName));
+ for (Action a : actions) {
+ ClientProtos.MultiAction.Builder ab = ClientProtos.MultiAction.newBuilder();
+ Row row = a.getAction();
+ if (row instanceof Get) {
+ ab.setGet(ProtobufUtil.toGet((Get) row));
+ } else if (row instanceof Put) {
+ ab.setMutate(ProtobufUtil.toMutate(ClientProtos.Mutate.MutateType.PUT, (Put) row));
+ } else if (row instanceof Delete) {
+ ab.setMutate(ProtobufUtil
+ .toMutate(ClientProtos.Mutate.MutateType.DELETE, (Delete) row));
+ } else if (row instanceof Exec) {
+ ab.setExec(ProtobufUtil.toExec((Exec) row));
+ } else if (row instanceof Append) {
+ ab.setMutate(ProtobufUtil
+ .toMutate(ClientProtos.Mutate.MutateType.APPEND, (Append) row));
+ } else if (row instanceof Increment) {
+ ab.setMutate(ProtobufUtil.toMutate((Increment) row));
+ } else if (row instanceof RowMutations) {
+ continue; // ignore RowMutations
+ } else {
+ throw new DoNotRetryIOException(
+ "multi doesn't support " + row.getClass().getName());
+ }
+ mr.addAction(ab);
+ }
+ return mr.build();
+ }
+
// End utilities for Client
//Start utilities for Admin
@@ -1048,7 +1081,7 @@ public final class RequestConverter {
/**
* Creates a protocol buffer GetSchemaAlterStatusRequest
*
- * @param tableName
+ * @param table
* @return a GetSchemaAlterStatusRequest
*/
public static GetSchemaAlterStatusRequest buildGetSchemaAlterStatusRequest(final byte [] table) {
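
A usage sketch for the buildMultiRequestUntyped helper added above: the caller wraps each Row operation in an Action (the index is preserved for result ordering) and the converter picks the protobuf shape per operation type. The class below is illustrative only, and the Action constructor is assumed from the 0.95-era client API.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.client.Action;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Bytes;

/** Hypothetical caller of RequestConverter.buildMultiRequestUntyped. */
public class MultiRequestExample {

  public static ClientProtos.MultiRequest buildPutAndDelete(byte[] regionName)
      throws IOException {
    Put put = new Put(Bytes.toBytes("row1"));
    put.add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    Delete delete = new Delete(Bytes.toBytes("row2"));

    List<Action> actions = new ArrayList<Action>();
    actions.add(new Action(put, 0));     // serialized as a Mutate(PUT) action
    actions.add(new Action(delete, 1));  // serialized as a Mutate(DELETE) action

    return RequestConverter.buildMultiRequestUntyped(regionName, actions);
  }
}
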
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperNodeTracker.java hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperNodeTracker.java
index 4f7fe6e..a658843 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperNodeTracker.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperNodeTracker.java
@@ -47,7 +47,7 @@ public abstract class ZooKeeperNodeTracker extends ZooKeeperListener {
private byte [] data;
/** Used to abort if a fatal error occurs */
- protected final Abortable abortable;
+ protected Abortable abortable;
private boolean stopped = false;
@@ -253,4 +253,12 @@ public abstract class ZooKeeperNodeTracker extends ZooKeeperListener {
}
return true;
}
+
+ public Abortable getAbortable() {
+ return abortable;
+ }
+
+ public void setAbortable(Abortable abortable) {
+ this.abortable = abortable;
+ }
}
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
index 33bc1d0..720d0cc 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
@@ -468,4 +468,13 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable {
public String getMasterAddressZNode() {
return this.masterAddressZNode;
}
+
+
+ public Abortable getAbortable() {
+ return abortable;
+ }
+
+ public void setAbortable(Abortable abortable) {
+ this.abortable = abortable;
+ }
}
diff --git pom.xml pom.xml
index dd458ec..512b35d 100644
--- pom.xml
+++ pom.xml
@@ -40,6 +40,7 @@
http://hbase.apache.org
+ hbase-netty
hbase-server
hbase-common
hbase-it
@@ -257,10 +258,6 @@
- cloudbees netty
- http://repository-netty.forge.cloudbees.com/snapshot/
-
-
apache release
https://repository.apache.org/content/repositories/releases/
@@ -806,6 +803,8 @@
0.0.1-SNAPSHOT
2.6.3
2.3.1
+ 3.5.0.Final
+ 3.0
2.3
1.6
@@ -855,6 +854,7 @@
hbase-common
${project.version}
+
hbase-server
org.apache.hbase
@@ -867,6 +867,11 @@
test-jar
test
+
+ org.apache.hbase
+ hbase-netty
+ ${project.version}
+
com.yammer.metrics
@@ -1118,6 +1123,18 @@
${mockito-all.version}
test
+
+
+ io.netty
+ netty
+ ${netty.version}
+
+
+ com.google.inject
+ guice
+ ${guice.version}
+
+