diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index 3d40c70..118ee08 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -35,7 +35,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java
index f9bdd55..4a0df33 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java
@@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.client;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index e43a712..89e9ec5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.client.MetricsConnection.CLIENT_SIDE_METRICS_ENABLED_KEY;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
import java.io.Closeable;
import java.io.IOException;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java
index 73bdb74..cee2779 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRespons
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* A Callable for flushRegion() RPC.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 9541967..510e92a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -177,8 +177,8 @@ import org.apache.hadoop.util.StringUtils;
import org.apache.zookeeper.KeeperException;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* HBaseAdmin is no longer a client API. It is marked InterfaceAudience.Private indicating that
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index befc671..cf62561 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -69,10 +69,10 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.util.Threads;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* An implementation of {@link Table}. Used to communicate with a single HBase table.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
index 53a3326..303611d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
@@ -18,8 +18,8 @@
package org.apache.hadoop.hbase.client;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
index f78f348..e78a7c9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* Callable that handles the multi method call going against a single
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
index ad1d2a1..1101020 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
@@ -23,8 +23,8 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Objects;
-import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
index 6ce4956..6d57aa7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.ExceptionUtil;
import org.apache.hadoop.ipc.RemoteException;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* Runs an rpc'ing {@link RetryingCallable}. Sets into rpc client
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
index f4e2614..7745710 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
@@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
index 72d69ec..a81306e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
@@ -52,8 +52,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.DNS;
-import com.google.protobuf.ServiceException;
-import com.google.protobuf.TextFormat;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.TextFormat;
/**
* Scanner operations such as create, next, etc.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
index 3e9db00..4b0dada 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
@@ -32,10 +32,10 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* Used to communicate with a single HBase table.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java
index 5d4ac8e..deb700b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.client;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
index 594a459..feb4c8b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
@@ -53,8 +53,8 @@ import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateServi
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
/**
* This client class is for invoking the aggregate functions deployed on the
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java
index 9682f89..fb1dcff 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java
@@ -18,7 +18,7 @@
*/
package org.apache.hadoop.hbase.client.replication;
-import com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
index e247c08..283244d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
@@ -24,7 +24,7 @@ import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
/**
* Defines how value for specific column is interpreted and provides utility
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
index 3cbb7b9..1bc8af3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A binary comparator which lexicographically compares against the specified
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
index a26edbc..d001532 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A comparator which compares against a specified byte array, but only compares
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
index db51df7..830417e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A bit comparator which performs the specified bitwise operation on each of the bytes
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
index fd65130..1c9429f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* Simple filter that returns first N columns on row only.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
index e5ec412..c9da5b2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A filter, based on the ColumnCountGetFilter, takes two arguments: limit and offset.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
index ff6e8e2..ea84042 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* This filter is used for selecting only those keys with columns that matches
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
index 04682c5..75324d4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* This filter is used for selecting only those keys with columns that are
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
index f7c6f26..5711d3c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A filter for adding inter-column timestamp matching
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
index b3f9a1a..aabf6d1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
*
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index da7a084..2efde28 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* Implementation of {@link Filter} that represents an ordered List of Filters
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
index 4d7a18a..5a2d2d4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* This is a Filter wrapper class which is used in the server side. Some filter
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
index 80a1deb..4f3e605 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A filter that will only return the first KV from each row.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
index 2e9510f..5b93c97 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
@@ -30,8 +30,8 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* The filter looks for the given columns in KeyValue. Once there is a match for
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
index 500d01d..cfc9d5c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.util.UnsafeAccess;
import org.apache.hadoop.hbase.util.UnsafeAvailChecker;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* This is optimized version of a standard FuzzyRowFilter Filters data based on fuzzy row key.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
index 1096f5e..acc05e9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A Filter that stops after the given row. There is no "RowStopFilter" because
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
index 2fd5aba..0da93a2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A filter that will only return the key component of each KV (the value will
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
index 9c56772..48027f4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.filter;
import java.nio.ByteBuffer;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index 5f9c833..31456c7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* Filter to support scan multiple row key ranges. It can construct the row key ranges from the
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
index 0c14649..e7d08bb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* This filter is used for selecting only those keys with columns that matches
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
index 160232f..5d7ad83 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A binary comparator which lexicographically compares against the specified
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
index adc9c54..6e356cd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* Implementation of Filter interface that limits results to a specific page
* size. It terminates scanning once the number of filter-passed rows is >
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
index d09ea2c..43d400f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* Pass results that have same row prefix.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
index 3aa3558..b148ab3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* This filter is used to filter based on the column qualifier. It takes an
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
index decdc78..9af8fd0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A filter that includes rows based on a chance.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
index 70dd1f9..87881ee 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
@@ -38,7 +38,7 @@ import org.joni.Option;
import org.joni.Regex;
import org.joni.Syntax;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* This comparator is for use with {@link CompareFilter} implementations, such
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
index 559eff8..3e52007 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* This filter is used to filter based on the key. It takes an operator
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
index d030fd2..8b138f2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A {@link Filter} that checks a single column value, but does not emit the
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
index df4e482..aac6754 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* This filter is used to filter cells based on value. It takes a {@link CompareFilter.CompareOp}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
index 3aced13..1d39dc3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A wrapper filter that filters an entire row if any of the Cell checks do
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
index 63fd0a3..c07114a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
index f0e5afe..e48f7c5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* Filter that returns only cells whose timestamp (version) is
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
index 2f679f0..e27e7ce 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* This filter is used to filter based on column value. It takes an
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
index e75ca49..3fb119c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A wrapper filter that returns true from {@link #filterAllRemaining()} as soon
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index ec6332a..7d702f6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -19,11 +19,11 @@
package org.apache.hadoop.hbase.ipc;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
import java.io.IOException;
import java.net.ConnectException;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java
index a5da0dc..71bb74f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.ipc;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
import io.netty.channel.EventLoop;
import io.netty.util.concurrent.DefaultPromise;
import org.apache.commons.logging.Log;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java
index 53eb824..ecb8887 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java
@@ -60,9 +60,9 @@ import org.apache.hadoop.security.token.TokenSelector;
import org.apache.htrace.Span;
import org.apache.htrace.Trace;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java
index c2bd457..f9b8cb2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java
@@ -61,11 +61,11 @@ import org.apache.hadoop.hbase.util.PoolMap;
import org.apache.hadoop.hbase.util.Threads;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcChannel;
-import com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcChannel;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
/**
* Netty client for the requests and responses
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java
index e0c7586..a2bdd9f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
import org.apache.hadoop.ipc.RemoteException;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufInputStream;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java
index 3aa59c7..c7e507b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java
@@ -23,7 +23,7 @@ import java.io.InterruptedIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
/**
* Simple {@link RpcCallback} implementation providing a
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java
index 5f90837..4e88ad0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.ipc;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.MetricsConnection;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java
index b1d54a4..bb751e4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java
@@ -26,13 +26,13 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcChannel;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcChannel;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* Base class which provides clients with an RPC connection to
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
index d98d81d..f48f5ec 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
@@ -46,8 +46,8 @@ import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
import com.google.common.base.Preconditions;
-import com.google.protobuf.CodedOutputStream;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
/**
* Utility to help ipc'ing.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
index 6e59972..7f729b7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
@@ -30,9 +30,9 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
import org.apache.hadoop.hbase.util.ByteStringer;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
/**
* Provides clients with an RPC connection to call coprocessor endpoint {@link com.google.protobuf.Service}s
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
index 321dd62..f722c87 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
@@ -33,9 +33,9 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServic
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
/**
* Provides clients with an RPC connection to call coprocessor endpoint {@link com.google.protobuf.Service}s
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
index 24d2de4..6baccef 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
@@ -24,9 +24,9 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
import org.apache.hadoop.hbase.util.ByteStringer;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
/**
* Provides clients with an RPC connection to call coprocessor endpoint
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
index 540e224..d3865c5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.ipc;
-import com.google.protobuf.BlockingRpcChannel;
+import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.security.User;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
index 83d4adf..9a266d8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
@@ -97,10 +97,10 @@ import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.Message;
-import com.google.protobuf.Message.Builder;
-import com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Message.Builder;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
/**
* Does RPC against a cluster. Manages connections per regionserver in the cluster.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java
index aa407f7..c058533 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java
@@ -23,8 +23,8 @@ import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.util.StringUtils;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
/**
* Used for server-side protobuf RPC service invocations. This handler allows
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java
index de502cb..dd6431e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java
@@ -23,8 +23,8 @@ import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
@InterfaceAudience.Private
public class TimeLimitedRpcController implements RpcController {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 90516ec..9b4ba84 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -167,16 +167,16 @@ import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.net.HostAndPort;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.CodedInputStream;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.Message;
-import com.google.protobuf.Parser;
-import com.google.protobuf.RpcChannel;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
-import com.google.protobuf.TextFormat;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.CodedInputStream;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Parser;
+import org.apache.hbase.shaded.com.google.protobuf.RpcChannel;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.TextFormat;
/**
* Protobufs utility.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
index 45d15a3..d45eddd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
@@ -115,7 +115,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
-import com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
/**
* Helper utility to build protocol buffer requests,
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
index 421907d..5160b44 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
@@ -59,8 +59,8 @@ import org.apache.hadoop.hbase.regionserver.RegionOpeningState;
import org.apache.hadoop.hbase.security.access.UserPermission;
import org.apache.hadoop.util.StringUtils;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
/**
* Helper utility to build protocol buffer responses,
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
index 64e1a39..9a03a0c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.TokenIdentifier;
-import com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
/**
* Represents the identity information stored in an HBase authentication token.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
index 7527049..bb3107c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.Visibil
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* Utility client for doing visibility labels admin operations.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java
index 6f4859a..dc8d438 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.Stat;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* Manages the location of the current active Master for the RegionServer.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
index 0b53f95..3fcc345 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
@@ -58,7 +58,7 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.zookeeper.KeeperException;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* Utility class to perform operation (get/wait for/verify/set/delete) on znode in ZooKeeper
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index 9e01d09..53370b0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -77,7 +77,7 @@ import org.apache.zookeeper.proto.DeleteRequest;
import org.apache.zookeeper.proto.SetDataRequest;
import org.apache.zookeeper.server.ZooKeeperSaslServer;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* Internal HBase utility class for ZooKeeper.
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
index 0a5a37f..29c601c 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
@@ -86,9 +86,9 @@ import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import com.google.common.base.Stopwatch;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* Test client behavior w/o setting up a cluster.
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java
index 5191880..c45de59 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.client;
-import com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest;
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
index 4d55c33..f3f5028 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
@@ -41,7 +41,7 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
-import com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
/**
* Test snapshot logic from the client
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java
index 968e55c..e44611c 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java
@@ -18,7 +18,7 @@
*/
package org.apache.hadoop.hbase.exceptions;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
import org.junit.Test;
import java.io.IOException;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
index c0c43ed..7e0cd5a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
import com.google.common.net.HostAndPort;
import com.google.common.net.InetAddresses;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import java.io.Serializable;
import java.util.ArrayList;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java
index 3d545f6..1d732b9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java
@@ -22,9 +22,9 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Order;
import org.apache.hadoop.hbase.util.PositionedByteRange;
-import com.google.protobuf.CodedInputStream;
-import com.google.protobuf.CodedOutputStream;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.CodedInputStream;
+import org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
/**
* A base-class for {@link DataType} implementations backed by protobuf. See
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index 7b9eb0b..593ea17 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -35,7 +35,7 @@ import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
-import com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
index c9ab23c..b111be1 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
@@ -51,9 +51,9 @@ import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
* Defines a protocol to delete data in bulk based on a scan. The scan can be range scan or with
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
index 4309cdc..188d8d0 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
@@ -36,9 +36,9 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
* Sample coprocessor endpoint exposing a Service interface for counting rows and key values.
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
index 373e036..51f66bb 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
@@ -6,10 +6,10 @@ package org.apache.hadoop.hbase.coprocessor.example.generated;
public final class BulkDeleteProtos {
private BulkDeleteProtos() {}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
}
public interface BulkDeleteRequestOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// required .hbase.pb.Scan scan = 1;
/**
@@ -59,14 +59,14 @@ public final class BulkDeleteProtos {
* Protobuf type {@code hbase.pb.BulkDeleteRequest}
*/
public static final class BulkDeleteRequest extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements BulkDeleteRequestOrBuilder {
// Use BulkDeleteRequest.newBuilder() to construct.
- private BulkDeleteRequest(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private BulkDeleteRequest(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private BulkDeleteRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private BulkDeleteRequest(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final BulkDeleteRequest defaultInstance;
public static BulkDeleteRequest getDefaultInstance() {
@@ -77,20 +77,20 @@ public final class BulkDeleteProtos {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private BulkDeleteRequest(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -142,40 +142,40 @@ public final class BulkDeleteProtos {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class);
}
- public static com.google.protobuf.Parser
* Only a single instance may be registered for a given {@link Service} subclass (the
- * instances are keyed on {@link com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}.
+ * instances are keyed on {@link org.apache.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}.
* After the first registration, subsequent calls with the same service name will fail with
* a return value of {@code false}.
*
* Only a single instance may be registered per region for a given {@link Service} subclass (the
- * instances are keyed on {@link com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}.
+ * instances are keyed on {@link org.apache.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}.
* After the first registration, subsequent calls with the same service name will fail with
* a return value of {@code false}.
* This is a customized version of the polymorphic hadoop
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
index c1f9251..d43c96f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hbase.security.access;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
index a7e6113..25a5d67 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.security.token;
import java.io.IOException;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
index 68817bc..b697344 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
@@ -22,7 +22,7 @@ import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.security.PrivilegedExceptionAction;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
index 6e2f8ed..db9f891 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
@@ -109,10 +109,10 @@ import org.apache.hadoop.hbase.util.Pair;
import com.google.common.collect.Lists;
import com.google.common.collect.MapMaker;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
* Coprocessor that has both the MasterObserver and RegionObserver implemented that supports in
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index df0c348..fe405f3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -18,8 +18,8 @@
package org.apache.hadoop.hbase.snapshot;
-import com.google.protobuf.CodedInputStream;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.CodedInputStream;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import java.io.FileNotFoundException;
import java.io.IOException;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
index 3bb3575..93a4c48 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.snapshot;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.ArrayList;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.java
index 2225191..d94233b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputHelper.java
@@ -59,7 +59,7 @@ import java.util.concurrent.TimeUnit;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
-import com.google.protobuf.CodedOutputStream;
+import org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputSaslHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputSaslHelper.java
index 341d4ec..4d1735c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputSaslHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutputSaslHelper.java
@@ -18,24 +18,6 @@
package org.apache.hadoop.hbase.util;
import static io.netty.handler.timeout.IdleState.READER_IDLE;
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.ByteBufOutputStream;
-import io.netty.buffer.CompositeByteBuf;
-import io.netty.buffer.Unpooled;
-import io.netty.channel.Channel;
-import io.netty.channel.ChannelDuplexHandler;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.channel.ChannelOutboundHandlerAdapter;
-import io.netty.channel.ChannelPipeline;
-import io.netty.channel.ChannelPromise;
-import io.netty.channel.SimpleChannelInboundHandler;
-import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
-import io.netty.handler.codec.MessageToByteEncoder;
-import io.netty.handler.codec.protobuf.ProtobufDecoder;
-import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-import io.netty.handler.timeout.IdleStateEvent;
-import io.netty.handler.timeout.IdleStateHandler;
-import io.netty.util.concurrent.Promise;
import java.io.IOException;
import java.lang.reflect.Constructor;
@@ -63,13 +45,6 @@ import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslException;
-import com.google.common.base.Charsets;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.CodedOutputStream;
-
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -89,6 +64,31 @@ import org.apache.hadoop.security.SaslPropertiesResolver;
import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
+import org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream;
+
+import com.google.common.base.Charsets;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufOutputStream;
+import io.netty.buffer.CompositeByteBuf;
+import io.netty.buffer.Unpooled;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelDuplexHandler;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelOutboundHandlerAdapter;
+import io.netty.channel.ChannelPipeline;
+import io.netty.channel.ChannelPromise;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
+import io.netty.handler.codec.MessageToByteEncoder;
+import io.netty.handler.codec.protobuf.ProtobufDecoder;
+import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+import io.netty.handler.timeout.IdleStateEvent;
+import io.netty.handler.timeout.IdleStateHandler;
+import io.netty.util.concurrent.Promise;
/**
* Helper class for adding sasl support for {@link FanOutOneBlockAsyncDFSOutput}.
@@ -654,7 +654,10 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
DataTransferEncryptorMessageProto.newBuilder();
builder.setStatus(DataTransferEncryptorStatus.SUCCESS);
if (payload != null) {
- builder.setPayload(ByteString.copyFrom(payload));
+ // NOTE!!! Explicit reference to HDFS's transitively included protobuf! HBase has
+ // shaded and relocated the protobuf it uses to be at
+ // org.apache.hbase.shaded.com.google.protobuf.
+ builder.setPayload(com.google.protobuf.ByteString.copyFrom(payload));
}
if (options != null) {
CIPHER_HELPER.addCipherOptions(builder, options);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 9abef9c..03de0e0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -61,7 +61,7 @@ import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.Ordering;
import com.google.common.collect.TreeMultimap;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.lang.StringUtils;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java
index 3c2203b..549d45a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java
@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*
- * The portion of this file denoted by 'Copied from com.google.protobuf.CodedInputStream'
+ * The portion of this file denoted by 'Copied from org.apache.hbase.shaded.com.google.protobuf.CodedInputStream'
* is from Protocol Buffers v2.4.1 under the following license
*
* Copyright 2008 Google Inc. All rights reserved.
@@ -66,7 +66,7 @@ public abstract class ProtoUtil {
* @throws IOException if it is malformed or EOF.
*/
public static int readRawVarint32(DataInput in) throws IOException {
- // Copied from com.google.protobuf.CodedInputStream v2.4.1 readRawVarint32
+ // Copied from org.apache.hbase.shaded.com.google.protobuf.CodedInputStream v2.4.1 readRawVarint32
byte tmp = in.readByte();
if (tmp >= 0) {
return tmp;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java
index 86fdfbd..ec9b11d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java
@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
/**
* A Key for an entry in the WAL.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index 8d78480..289d6ad 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -122,8 +122,8 @@ import org.apache.hadoop.ipc.RemoteException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
-import com.google.protobuf.ServiceException;
-import com.google.protobuf.TextFormat;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.TextFormat;
/**
* This class is responsible for splitting up a bunch of regionserver commit log
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java
index 6cd1963..bfc936d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java
@@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
* Basic mock region server services. Should only be instantiated by HBaseTestingUtility.b
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java
index 6db201f..0cf0047 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
import org.apache.hadoop.hbase.regionserver.AnnotationReadingPriorityFunction;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
index 227db6f..61c72f7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
@@ -48,8 +48,8 @@ import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* Test MetaTableAccessor but without spinning up a cluster.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
index ba6e1d4..a6b6883 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
@@ -54,8 +54,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* Test {@link org.apache.hadoop.hbase.zookeeper.MetaTableLocator}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java
index e6c17a5..ff9270f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
@Category({MiscTests.class, SmallTests.class})
public class TestServerLoad {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
index 10dbed0..41ae8f9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
@@ -70,7 +70,7 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* Class to test HBaseAdmin.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
index e510d28..855497b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
@@ -66,7 +66,7 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
index 515e763..9835e5d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
@@ -45,8 +45,8 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* Test the scenario where a HRegionServer#scan() call, while scanning, timeout at client side and
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java
index 36276fa..62a3efc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java
@@ -46,11 +46,11 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
@Category({MediumTests.class, ClientTests.class})
public class TestClientTimeouts {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java
index 45093bb..edabdb3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java
@@ -58,8 +58,8 @@ import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
@Category({SmallTests.class, ClientTests.class})
public class TestHBaseAdminNoCluster {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
index 23b9eed..d8237b5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.client;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
index 4e4ff5e..9e6f718 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
@@ -36,9 +36,9 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
index 6c0ea49..7e8a287 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
@@ -37,9 +37,9 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
* Test coprocessor endpoint that always returns {@code null} for requests to the last region
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
index 32d978c..b380b7e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
@@ -38,9 +38,9 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.util.Bytes;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
* Test coprocessor endpoint that always throws a {@link DoNotRetryIOException} for requests on
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java
index cdda28a..1a09d80 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hbase.coprocessor;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
index d62e950..41a6961 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
@@ -50,7 +50,7 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* TestEndpoint: test cases to verify the batch execution of coprocessor Endpoint
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
index 1768a2a..987d871 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
@@ -60,8 +60,8 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* TestEndpoint: test cases to verify coprocessor Endpoint
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
index 7695361..a56993d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
@@ -42,7 +42,7 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
@Category({CoprocessorTests.class, MediumTests.class})
public class TestCoprocessorTableEndpoint {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
index 1484c34..89b8127 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
@@ -40,9 +40,9 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
index b3d3890..c9db9b9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
@@ -74,7 +74,7 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java
index 61b47ff..6c36ac1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java
@@ -6,10 +6,10 @@ package org.apache.hadoop.hbase.coprocessor.protobuf.generated;
public final class ColumnAggregationProtos {
private ColumnAggregationProtos() {}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
}
public interface SumRequestOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// required bytes family = 1;
/**
@@ -19,7 +19,7 @@ public final class ColumnAggregationProtos {
/**
* ROW = 0;
*/
@@ -232,27 +232,27 @@ public final class BulkDeleteProtos {
}
}
- public static com.google.protobuf.Internal.EnumLiteMaprequired .hbase.pb.Scan scan = 1;
@@ -837,11 +837,11 @@ public final class BulkDeleteProtos {
/**
* required .hbase.pb.Scan scan = 1;
*/
- private com.google.protobuf.SingleFieldBuilder<
+ private org.apache.hbase.shaded.com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
getScanFieldBuilder() {
if (scanBuilder_ == null) {
- scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ scanBuilder_ = new org.apache.hbase.shaded.com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
scan_,
getParentForChildren(),
@@ -965,7 +965,7 @@ public final class BulkDeleteProtos {
}
public interface BulkDeleteResponseOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// required uint64 rowsDeleted = 1;
/**
@@ -991,14 +991,14 @@ public final class BulkDeleteProtos {
* Protobuf type {@code hbase.pb.BulkDeleteResponse}
*/
public static final class BulkDeleteResponse extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements BulkDeleteResponseOrBuilder {
// Use BulkDeleteResponse.newBuilder() to construct.
- private BulkDeleteResponse(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private BulkDeleteResponse(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private BulkDeleteResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private BulkDeleteResponse(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final BulkDeleteResponse defaultInstance;
public static BulkDeleteResponse getDefaultInstance() {
@@ -1009,20 +1009,20 @@ public final class BulkDeleteProtos {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private BulkDeleteResponse(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -1050,40 +1050,40 @@ public final class BulkDeleteProtos {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class);
}
- public static com.google.protobuf.Parserrpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse);
*/
public abstract void delete(
- com.google.protobuf.RpcController controller,
+ org.apache.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
- com.google.protobuf.RpcCallbackrpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse);
*/
public abstract void delete(
- com.google.protobuf.RpcController controller,
+ org.apache.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
- com.google.protobuf.RpcCallbackrpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);
*/
public abstract void getRowCount(
- com.google.protobuf.RpcController controller,
+ org.apache.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
- com.google.protobuf.RpcCallbackrpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);
*/
public abstract void getKeyValueCount(
- com.google.protobuf.RpcController controller,
+ org.apache.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
- com.google.protobuf.RpcCallbackrpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);
*/
public abstract void getRowCount(
- com.google.protobuf.RpcController controller,
+ org.apache.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
- com.google.protobuf.RpcCallbackrpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);
*/
public abstract void getKeyValueCount(
- com.google.protobuf.RpcController controller,
+ org.apache.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
- com.google.protobuf.RpcCallbackoptional bytes column = 2;
*/
- com.google.protobuf.ByteString getColumn();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getColumn();
// optional int64 timestamp = 3;
/**
@@ -57,20 +57,20 @@ public final class CellMessage {
/**
* optional bytes data = 4;
*/
- com.google.protobuf.ByteString getData();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getData();
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Cell}
*/
public static final class Cell extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements CellOrBuilder {
// Use Cell.newBuilder() to construct.
- private Cell(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private Cell(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private Cell(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private Cell(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Cell defaultInstance;
public static Cell getDefaultInstance() {
@@ -81,20 +81,20 @@ public final class CellMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Cell(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -132,47 +132,47 @@ public final class CellMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
}
- public static com.google.protobuf.Parseroptional bytes row = 1;
*
@@ -190,13 +190,13 @@ public final class CellMessage {
* unused if Cell is in a CellSet
*
*/
- public com.google.protobuf.ByteString getRow() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getRow() {
return row_;
}
// optional bytes column = 2;
public static final int COLUMN_FIELD_NUMBER = 2;
- private com.google.protobuf.ByteString column_;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString column_;
/**
* optional bytes column = 2;
*/
@@ -206,7 +206,7 @@ public final class CellMessage {
/**
* optional bytes column = 2;
*/
- public com.google.protobuf.ByteString getColumn() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getColumn() {
return column_;
}
@@ -228,7 +228,7 @@ public final class CellMessage {
// optional bytes data = 4;
public static final int DATA_FIELD_NUMBER = 4;
- private com.google.protobuf.ByteString data_;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString data_;
/**
* optional bytes data = 4;
*/
@@ -238,15 +238,15 @@ public final class CellMessage {
/**
* optional bytes data = 4;
*/
- public com.google.protobuf.ByteString getData() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getData() {
return data_;
}
private void initFields() {
- row_ = com.google.protobuf.ByteString.EMPTY;
- column_ = com.google.protobuf.ByteString.EMPTY;
+ row_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
+ column_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
timestamp_ = 0L;
- data_ = com.google.protobuf.ByteString.EMPTY;
+ data_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -257,7 +257,7 @@ public final class CellMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -282,19 +282,19 @@ public final class CellMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, row_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(2, column_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt64Size(3, timestamp_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(4, data_);
}
size += getUnknownFields().getSerializedSize();
@@ -310,24 +310,24 @@ public final class CellMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(java.io.InputStream input)
@@ -336,7 +336,7 @@ public final class CellMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -346,18 +346,18 @@ public final class CellMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -371,7 +371,7 @@ public final class CellMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -379,14 +379,14 @@ public final class CellMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Cell}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderoptional bytes row = 1;
*
@@ -539,7 +539,7 @@ public final class CellMessage {
* unused if Cell is in a CellSet
*
*/
- public com.google.protobuf.ByteString getRow() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getRow() {
return row_;
}
/**
@@ -549,7 +549,7 @@ public final class CellMessage {
* unused if Cell is in a CellSet
*
*/
- public Builder setRow(com.google.protobuf.ByteString value) {
+ public Builder setRow(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -573,7 +573,7 @@ public final class CellMessage {
}
// optional bytes column = 2;
- private com.google.protobuf.ByteString column_ = com.google.protobuf.ByteString.EMPTY;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString column_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes column = 2;
*/
@@ -583,13 +583,13 @@ public final class CellMessage {
/**
* optional bytes column = 2;
*/
- public com.google.protobuf.ByteString getColumn() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getColumn() {
return column_;
}
/**
* optional bytes column = 2;
*/
- public Builder setColumn(com.google.protobuf.ByteString value) {
+ public Builder setColumn(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -642,7 +642,7 @@ public final class CellMessage {
}
// optional bytes data = 4;
- private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString data_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes data = 4;
*/
@@ -652,13 +652,13 @@ public final class CellMessage {
/**
* optional bytes data = 4;
*/
- public com.google.protobuf.ByteString getData() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getData() {
return data_;
}
/**
* optional bytes data = 4;
*/
- public Builder setData(com.google.protobuf.ByteString value) {
+ public Builder setData(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -688,17 +688,17 @@ public final class CellMessage {
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Cell)
}
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable;
- public static com.google.protobuf.Descriptors.FileDescriptor
+ public static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
- private static com.google.protobuf.Descriptors.FileDescriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
@@ -707,23 +707,23 @@ public final class CellMessage {
"row\030\001 \001(\014\022\016\n\006column\030\002 \001(\014\022\021\n\ttimestamp\030\003" +
" \001(\003\022\014\n\004data\030\004 \001(\014"
};
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor,
new java.lang.String[] { "Row", "Column", "Timestamp", "Data", });
return null;
}
};
- com.google.protobuf.Descriptors.FileDescriptor
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellSetMessage.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellSetMessage.java
index f35a25f..6b962da 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellSetMessage.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellSetMessage.java
@@ -6,10 +6,10 @@ package org.apache.hadoop.hbase.rest.protobuf.generated;
public final class CellSetMessage {
private CellSetMessage() {}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
}
public interface CellSetOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.CellSet.Row rows = 1;
/**
@@ -40,14 +40,14 @@ public final class CellSetMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.CellSet}
*/
public static final class CellSet extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements CellSetOrBuilder {
// Use CellSet.newBuilder() to construct.
- private CellSet(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private CellSet(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private CellSet(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private CellSet(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CellSet defaultInstance;
public static CellSet getDefaultInstance() {
@@ -58,20 +58,20 @@ public final class CellSetMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CellSet(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -97,10 +97,10 @@ public final class CellSetMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -110,35 +110,35 @@ public final class CellSetMessage {
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.class, org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Builder.class);
}
- public static com.google.protobuf.Parserrequired bytes key = 1;
*/
- com.google.protobuf.ByteString getKey();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getKey();
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.Cell values = 2;
/**
@@ -179,14 +179,14 @@ public final class CellSetMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.CellSet.Row}
*/
public static final class Row extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements RowOrBuilder {
// Use Row.newBuilder() to construct.
- private Row(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private Row(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private Row(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private Row(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Row defaultInstance;
public static Row getDefaultInstance() {
@@ -197,20 +197,20 @@ public final class CellSetMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Row(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -241,10 +241,10 @@ public final class CellSetMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
@@ -254,37 +254,37 @@ public final class CellSetMessage {
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_Row_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_Row_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.class, org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.Builder.class);
}
- public static com.google.protobuf.Parserrequired bytes key = 1;
*/
@@ -294,7 +294,7 @@ public final class CellSetMessage {
/**
* required bytes key = 1;
*/
- public com.google.protobuf.ByteString getKey() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getKey() {
return key_;
}
@@ -335,7 +335,7 @@ public final class CellSetMessage {
}
private void initFields() {
- key_ = com.google.protobuf.ByteString.EMPTY;
+ key_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
values_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
@@ -351,7 +351,7 @@ public final class CellSetMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -370,11 +370,11 @@ public final class CellSetMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, key_);
}
for (int i = 0; i < values_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(2, values_.get(i));
}
size += getUnknownFields().getSerializedSize();
@@ -390,24 +390,24 @@ public final class CellSetMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(java.io.InputStream input)
@@ -416,7 +416,7 @@ public final class CellSetMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -426,18 +426,18 @@ public final class CellSetMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -451,7 +451,7 @@ public final class CellSetMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -459,14 +459,14 @@ public final class CellSetMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.CellSet.Row}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrequired bytes key = 1;
*/
@@ -630,13 +630,13 @@ public final class CellSetMessage {
/**
* required bytes key = 1;
*/
- public com.google.protobuf.ByteString getKey() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getKey() {
return key_;
}
/**
* required bytes key = 1;
*/
- public Builder setKey(com.google.protobuf.ByteString value) {
+ public Builder setKey(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -665,7 +665,7 @@ public final class CellSetMessage {
}
}
- private com.google.protobuf.RepeatedFieldBuilder<
+ private org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.CellOrBuilder> valuesBuilder_;
/**
@@ -880,11 +880,11 @@ public final class CellSetMessage {
getValuesBuilderList() {
return getValuesFieldBuilder().getBuilderList();
}
- private com.google.protobuf.RepeatedFieldBuilder<
+ private org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.CellOrBuilder>
getValuesFieldBuilder() {
if (valuesBuilder_ == null) {
- valuesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ valuesBuilder_ = new org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.CellOrBuilder>(
values_,
((bitField0_ & 0x00000002) == 0x00000002),
@@ -960,7 +960,7 @@ public final class CellSetMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < rows_.size(); i++) {
@@ -976,7 +976,7 @@ public final class CellSetMessage {
size = 0;
for (int i = 0; i < rows_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(1, rows_.get(i));
}
size += getUnknownFields().getSerializedSize();
@@ -992,24 +992,24 @@ public final class CellSetMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(java.io.InputStream input)
@@ -1018,7 +1018,7 @@ public final class CellSetMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -1028,18 +1028,18 @@ public final class CellSetMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -1053,7 +1053,7 @@ public final class CellSetMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -1061,14 +1061,14 @@ public final class CellSetMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.CellSet}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderoptional string name = 1;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes();
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema.Attribute attrs = 2;
@@ -91,21 +91,21 @@ public final class ColumnSchemaMessage {
/**
* optional string compression = 5;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getCompressionBytes();
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema}
*/
public static final class ColumnSchema extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements ColumnSchemaOrBuilder {
// Use ColumnSchema.newBuilder() to construct.
- private ColumnSchema(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private ColumnSchema(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private ColumnSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private ColumnSchema(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ColumnSchema defaultInstance;
public static ColumnSchema getDefaultInstance() {
@@ -116,20 +116,20 @@ public final class ColumnSchemaMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ColumnSchema(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -175,10 +175,10 @@ public final class ColumnSchemaMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
@@ -188,35 +188,35 @@ public final class ColumnSchemaMessage {
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.class, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder.class);
}
- public static com.google.protobuf.Parserrequired string name = 1;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes();
// required string value = 2;
@@ -245,21 +245,21 @@ public final class ColumnSchemaMessage {
/**
* required string value = 2;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getValueBytes();
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema.Attribute}
*/
public static final class Attribute extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements AttributeOrBuilder {
// Use Attribute.newBuilder() to construct.
- private Attribute(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private Attribute(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private Attribute(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private Attribute(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Attribute defaultInstance;
public static Attribute getDefaultInstance() {
@@ -270,20 +270,20 @@ public final class ColumnSchemaMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Attribute(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -311,40 +311,40 @@ public final class ColumnSchemaMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.class, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder.class);
}
- public static com.google.protobuf.Parserrequired string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -409,8 +409,8 @@ public final class ColumnSchemaMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
value_ = s;
@@ -421,17 +421,17 @@ public final class ColumnSchemaMessage {
/**
* required string value = 2;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -456,7 +456,7 @@ public final class ColumnSchemaMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -475,11 +475,11 @@ public final class ColumnSchemaMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getValueBytes());
}
size += getUnknownFields().getSerializedSize();
@@ -495,24 +495,24 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(java.io.InputStream input)
@@ -521,7 +521,7 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -531,18 +531,18 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -556,7 +556,7 @@ public final class ColumnSchemaMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -564,14 +564,14 @@ public final class ColumnSchemaMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema.Attribute}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrequired string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -763,7 +763,7 @@ public final class ColumnSchemaMessage {
* required string name = 1;
*/
public Builder setNameBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -787,7 +787,7 @@ public final class ColumnSchemaMessage {
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ java.lang.String s = ((org.apache.hbase.shaded.com.google.protobuf.ByteString) ref)
.toStringUtf8();
value_ = s;
return s;
@@ -798,17 +798,17 @@ public final class ColumnSchemaMessage {
/**
* required string value = 2;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -837,7 +837,7 @@ public final class ColumnSchemaMessage {
* required string value = 2;
*/
public Builder setValueBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -876,8 +876,8 @@ public final class ColumnSchemaMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
name_ = s;
@@ -888,17 +888,17 @@ public final class ColumnSchemaMessage {
/**
* optional string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -995,8 +995,8 @@ public final class ColumnSchemaMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
compression_ = s;
@@ -1007,17 +1007,17 @@ public final class ColumnSchemaMessage {
/**
* optional string compression = 5;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getCompressionBytes() {
java.lang.Object ref = compression_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
compression_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -1043,7 +1043,7 @@ public final class ColumnSchemaMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -1071,23 +1071,23 @@ public final class ColumnSchemaMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getNameBytes());
}
for (int i = 0; i < attrs_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(2, attrs_.get(i));
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(3, ttl_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(4, maxVersions_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(5, getCompressionBytes());
}
size += getUnknownFields().getSerializedSize();
@@ -1103,24 +1103,24 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(java.io.InputStream input)
@@ -1129,7 +1129,7 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -1139,18 +1139,18 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -1164,7 +1164,7 @@ public final class ColumnSchemaMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -1172,14 +1172,14 @@ public final class ColumnSchemaMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderoptional string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -1429,7 +1429,7 @@ public final class ColumnSchemaMessage {
* optional string name = 1;
*/
public Builder setNameBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1449,7 +1449,7 @@ public final class ColumnSchemaMessage {
}
}
- private com.google.protobuf.RepeatedFieldBuilder<
+ private org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.AttributeOrBuilder> attrsBuilder_;
/**
@@ -1664,11 +1664,11 @@ public final class ColumnSchemaMessage {
getAttrsBuilderList() {
return getAttrsFieldBuilder().getBuilderList();
}
- private com.google.protobuf.RepeatedFieldBuilder<
+ private org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.AttributeOrBuilder>
getAttrsFieldBuilder() {
if (attrsBuilder_ == null) {
- attrsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ attrsBuilder_ = new org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.AttributeOrBuilder>(
attrs_,
((bitField0_ & 0x00000002) == 0x00000002),
@@ -1775,7 +1775,7 @@ public final class ColumnSchemaMessage {
public java.lang.String getCompression() {
java.lang.Object ref = compression_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ java.lang.String s = ((org.apache.hbase.shaded.com.google.protobuf.ByteString) ref)
.toStringUtf8();
compression_ = s;
return s;
@@ -1786,17 +1786,17 @@ public final class ColumnSchemaMessage {
/**
* optional string compression = 5;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getCompressionBytes() {
java.lang.Object ref = compression_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
compression_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -1825,7 +1825,7 @@ public final class ColumnSchemaMessage {
* optional string compression = 5;
*/
public Builder setCompressionBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1846,22 +1846,22 @@ public final class ColumnSchemaMessage {
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema)
}
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable;
- public static com.google.protobuf.Descriptors.FileDescriptor
+ public static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
- private static com.google.protobuf.Descriptors.FileDescriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
@@ -1874,29 +1874,29 @@ public final class ColumnSchemaMessage {
"sion\030\005 \001(\t\032(\n\tAttribute\022\014\n\004name\030\001 \002(\t\022\r\n" +
"\005value\030\002 \002(\t"
};
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor,
new java.lang.String[] { "Name", "Attrs", "Ttl", "MaxVersions", "Compression", });
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor =
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor,
new java.lang.String[] { "Name", "Value", });
return null;
}
};
- com.google.protobuf.Descriptors.FileDescriptor
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/NamespacePropertiesMessage.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/NamespacePropertiesMessage.java
index 3b5a13a..45a7e82 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/NamespacePropertiesMessage.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/NamespacePropertiesMessage.java
@@ -6,10 +6,10 @@ package org.apache.hadoop.hbase.rest.protobuf.generated;
public final class NamespacePropertiesMessage {
private NamespacePropertiesMessage() {}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
}
public interface NamespacePropertiesOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.NamespaceProperties.Property props = 1;
/**
@@ -40,14 +40,14 @@ public final class NamespacePropertiesMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.NamespaceProperties}
*/
public static final class NamespaceProperties extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements NamespacePropertiesOrBuilder {
// Use NamespaceProperties.newBuilder() to construct.
- private NamespaceProperties(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private NamespaceProperties(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private NamespaceProperties(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private NamespaceProperties(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final NamespaceProperties defaultInstance;
public static NamespaceProperties getDefaultInstance() {
@@ -58,20 +58,20 @@ public final class NamespacePropertiesMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private NamespaceProperties(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -97,10 +97,10 @@ public final class NamespacePropertiesMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -110,35 +110,35 @@ public final class NamespacePropertiesMessage {
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_NamespaceProperties_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_NamespaceProperties_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.class, org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Builder.class);
}
- public static com.google.protobuf.Parserrequired string key = 1;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getKeyBytes();
// required string value = 2;
@@ -167,21 +167,21 @@ public final class NamespacePropertiesMessage {
/**
* required string value = 2;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getValueBytes();
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.NamespaceProperties.Property}
*/
public static final class Property extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements PropertyOrBuilder {
// Use Property.newBuilder() to construct.
- private Property(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private Property(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private Property(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private Property(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Property defaultInstance;
public static Property getDefaultInstance() {
@@ -192,20 +192,20 @@ public final class NamespacePropertiesMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Property(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -233,40 +233,40 @@ public final class NamespacePropertiesMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_NamespaceProperties_Property_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_NamespaceProperties_Property_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Property.class, org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Property.Builder.class);
}
- public static com.google.protobuf.Parserrequired string key = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
key_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -331,8 +331,8 @@ public final class NamespacePropertiesMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
value_ = s;
@@ -343,17 +343,17 @@ public final class NamespacePropertiesMessage {
/**
* required string value = 2;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -378,7 +378,7 @@ public final class NamespacePropertiesMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -397,11 +397,11 @@ public final class NamespacePropertiesMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getKeyBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getValueBytes());
}
size += getUnknownFields().getSerializedSize();
@@ -417,24 +417,24 @@ public final class NamespacePropertiesMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Property parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Property parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Property parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Property parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Property parseFrom(java.io.InputStream input)
@@ -443,7 +443,7 @@ public final class NamespacePropertiesMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Property parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -453,18 +453,18 @@ public final class NamespacePropertiesMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Property parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Property parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties.Property parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -478,7 +478,7 @@ public final class NamespacePropertiesMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -486,14 +486,14 @@ public final class NamespacePropertiesMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.NamespaceProperties.Property}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrequired string key = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
key_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -685,7 +685,7 @@ public final class NamespacePropertiesMessage {
* required string key = 1;
*/
public Builder setKeyBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -709,7 +709,7 @@ public final class NamespacePropertiesMessage {
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ java.lang.String s = ((org.apache.hbase.shaded.com.google.protobuf.ByteString) ref)
.toStringUtf8();
value_ = s;
return s;
@@ -720,17 +720,17 @@ public final class NamespacePropertiesMessage {
/**
* required string value = 2;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -759,7 +759,7 @@ public final class NamespacePropertiesMessage {
* required string value = 2;
*/
public Builder setValueBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -834,7 +834,7 @@ public final class NamespacePropertiesMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < props_.size(); i++) {
@@ -850,7 +850,7 @@ public final class NamespacePropertiesMessage {
size = 0;
for (int i = 0; i < props_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(1, props_.get(i));
}
size += getUnknownFields().getSerializedSize();
@@ -866,24 +866,24 @@ public final class NamespacePropertiesMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties parseFrom(java.io.InputStream input)
@@ -892,7 +892,7 @@ public final class NamespacePropertiesMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -902,18 +902,18 @@ public final class NamespacePropertiesMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacePropertiesMessage.NamespaceProperties parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -927,7 +927,7 @@ public final class NamespacePropertiesMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -935,14 +935,14 @@ public final class NamespacePropertiesMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.NamespaceProperties}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrepeated string namespace = 1;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getNamespaceBytes(int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Namespaces}
*/
public static final class Namespaces extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements NamespacesOrBuilder {
// Use Namespaces.newBuilder() to construct.
- private Namespaces(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private Namespaces(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private Namespaces(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private Namespaces(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Namespaces defaultInstance;
public static Namespaces getDefaultInstance() {
@@ -53,20 +53,20 @@ public final class NamespacesMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Namespaces(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -84,7 +84,7 @@ public final class NamespacesMessage {
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
- namespace_ = new com.google.protobuf.LazyStringArrayList();
+ namespace_ = new org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
namespace_.add(input.readBytes());
@@ -92,49 +92,49 @@ public final class NamespacesMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
- namespace_ = new com.google.protobuf.UnmodifiableLazyStringList(namespace_);
+ namespace_ = new org.apache.hbase.shaded.com.google.protobuf.UnmodifiableLazyStringList(namespace_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Namespaces_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Namespaces_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces.class, org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces.Builder.class);
}
- public static com.google.protobuf.Parserrepeated string namespace = 1;
*/
@@ -157,13 +157,13 @@ public final class NamespacesMessage {
/**
* repeated string namespace = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNamespaceBytes(int index) {
return namespace_.getByteString(index);
}
private void initFields() {
- namespace_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ namespace_ = org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -174,7 +174,7 @@ public final class NamespacesMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < namespace_.size(); i++) {
@@ -192,7 +192,7 @@ public final class NamespacesMessage {
{
int dataSize = 0;
for (int i = 0; i < namespace_.size(); i++) {
- dataSize += com.google.protobuf.CodedOutputStream
+ dataSize += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(namespace_.getByteString(i));
}
size += dataSize;
@@ -211,24 +211,24 @@ public final class NamespacesMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces parseFrom(java.io.InputStream input)
@@ -237,7 +237,7 @@ public final class NamespacesMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -247,18 +247,18 @@ public final class NamespacesMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -272,7 +272,7 @@ public final class NamespacesMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -280,14 +280,14 @@ public final class NamespacesMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Namespaces}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrepeated string namespace = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNamespaceBytes(int index) {
return namespace_.getByteString(index);
}
@@ -475,7 +475,7 @@ public final class NamespacesMessage {
* repeated string namespace = 1;
*/
public Builder clearNamespace() {
- namespace_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ namespace_ = org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
@@ -484,7 +484,7 @@ public final class NamespacesMessage {
* repeated string namespace = 1;
*/
public Builder addNamespaceBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -505,17 +505,17 @@ public final class NamespacesMessage {
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Namespaces)
}
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Namespaces_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Namespaces_fieldAccessorTable;
- public static com.google.protobuf.Descriptors.FileDescriptor
+ public static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
- private static com.google.protobuf.Descriptors.FileDescriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
@@ -523,23 +523,23 @@ public final class NamespacesMessage {
"doop.hbase.rest.protobuf.generated\"\037\n\nNa" +
"mespaces\022\021\n\tnamespace\030\001 \003(\t"
};
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Namespaces_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Namespaces_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Namespaces_descriptor,
new java.lang.String[] { "Namespace", });
return null;
}
};
- com.google.protobuf.Descriptors.FileDescriptor
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java
index ef9d1b9..4cba99e 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java
@@ -6,10 +6,10 @@ package org.apache.hadoop.hbase.rest.protobuf.generated;
public final class ScannerMessage {
private ScannerMessage() {}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
}
public interface ScannerOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// optional bytes startRow = 1;
/**
@@ -19,7 +19,7 @@ public final class ScannerMessage {
/**
* optional bytes startRow = 1;
*/
- com.google.protobuf.ByteString getStartRow();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getStartRow();
// optional bytes endRow = 2;
/**
@@ -29,7 +29,7 @@ public final class ScannerMessage {
/**
* optional bytes endRow = 2;
*/
- com.google.protobuf.ByteString getEndRow();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getEndRow();
// repeated bytes columns = 3;
/**
@@ -43,7 +43,7 @@ public final class ScannerMessage {
/**
* repeated bytes columns = 3;
*/
- com.google.protobuf.ByteString getColumns(int index);
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getColumns(int index);
// optional int32 batch = 4;
/**
@@ -97,7 +97,7 @@ public final class ScannerMessage {
/**
* optional string filter = 8;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getFilterBytes();
// optional int32 caching = 9;
@@ -135,7 +135,7 @@ public final class ScannerMessage {
/**
* repeated string labels = 10;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getLabelsBytes(int index);
// optional bool cacheBlocks = 11;
@@ -160,14 +160,14 @@ public final class ScannerMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Scanner}
*/
public static final class Scanner extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements ScannerOrBuilder {
// Use Scanner.newBuilder() to construct.
- private Scanner(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private Scanner(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private Scanner(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private Scanner(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Scanner defaultInstance;
public static Scanner getDefaultInstance() {
@@ -178,20 +178,20 @@ public final class ScannerMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Scanner(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -257,7 +257,7 @@ public final class ScannerMessage {
}
case 82: {
if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
- labels_ = new com.google.protobuf.LazyStringArrayList();
+ labels_ = new org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000200;
}
labels_.add(input.readBytes());
@@ -270,53 +270,53 @@ public final class ScannerMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
columns_ = java.util.Collections.unmodifiableList(columns_);
}
if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
- labels_ = new com.google.protobuf.UnmodifiableLazyStringList(labels_);
+ labels_ = new org.apache.hbase.shaded.com.google.protobuf.UnmodifiableLazyStringList(labels_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.class, org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.Builder.class);
}
- public static com.google.protobuf.Parseroptional bytes startRow = 1;
*/
@@ -326,13 +326,13 @@ public final class ScannerMessage {
/**
* optional bytes startRow = 1;
*/
- public com.google.protobuf.ByteString getStartRow() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getStartRow() {
return startRow_;
}
// optional bytes endRow = 2;
public static final int ENDROW_FIELD_NUMBER = 2;
- private com.google.protobuf.ByteString endRow_;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString endRow_;
/**
* optional bytes endRow = 2;
*/
@@ -342,7 +342,7 @@ public final class ScannerMessage {
/**
* optional bytes endRow = 2;
*/
- public com.google.protobuf.ByteString getEndRow() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getEndRow() {
return endRow_;
}
@@ -365,7 +365,7 @@ public final class ScannerMessage {
/**
* repeated bytes columns = 3;
*/
- public com.google.protobuf.ByteString getColumns(int index) {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getColumns(int index) {
return columns_.get(index);
}
@@ -450,8 +450,8 @@ public final class ScannerMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
filter_ = s;
@@ -462,17 +462,17 @@ public final class ScannerMessage {
/**
* optional string filter = 8;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
filter_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -502,7 +502,7 @@ public final class ScannerMessage {
// repeated string labels = 10;
public static final int LABELS_FIELD_NUMBER = 10;
- private com.google.protobuf.LazyStringList labels_;
+ private org.apache.hbase.shaded.com.google.protobuf.LazyStringList labels_;
/**
* repeated string labels = 10;
*/
@@ -525,7 +525,7 @@ public final class ScannerMessage {
/**
* repeated string labels = 10;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getLabelsBytes(int index) {
return labels_.getByteString(index);
}
@@ -555,8 +555,8 @@ public final class ScannerMessage {
}
private void initFields() {
- startRow_ = com.google.protobuf.ByteString.EMPTY;
- endRow_ = com.google.protobuf.ByteString.EMPTY;
+ startRow_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
+ endRow_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
columns_ = java.util.Collections.emptyList();
batch_ = 0;
startTime_ = 0L;
@@ -564,7 +564,7 @@ public final class ScannerMessage {
maxVersions_ = 0;
filter_ = "";
caching_ = 0;
- labels_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ labels_ = org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
cacheBlocks_ = false;
}
private byte memoizedIsInitialized = -1;
@@ -576,7 +576,7 @@ public final class ScannerMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -622,57 +622,57 @@ public final class ScannerMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, startRow_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(2, endRow_);
}
{
int dataSize = 0;
for (int i = 0; i < columns_.size(); i++) {
- dataSize += com.google.protobuf.CodedOutputStream
+ dataSize += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(columns_.get(i));
}
size += dataSize;
size += 1 * getColumnsList().size();
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(4, batch_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt64Size(5, startTime_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt64Size(6, endTime_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(7, maxVersions_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(8, getFilterBytes());
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(9, caching_);
}
{
int dataSize = 0;
for (int i = 0; i < labels_.size(); i++) {
- dataSize += com.google.protobuf.CodedOutputStream
+ dataSize += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(labels_.getByteString(i));
}
size += dataSize;
size += 1 * getLabelsList().size();
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBoolSize(11, cacheBlocks_);
}
size += getUnknownFields().getSerializedSize();
@@ -688,24 +688,24 @@ public final class ScannerMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(java.io.InputStream input)
@@ -714,7 +714,7 @@ public final class ScannerMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -724,18 +724,18 @@ public final class ScannerMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -749,7 +749,7 @@ public final class ScannerMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -757,14 +757,14 @@ public final class ScannerMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Scanner}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderoptional bytes startRow = 1;
*/
@@ -991,13 +991,13 @@ public final class ScannerMessage {
/**
* optional bytes startRow = 1;
*/
- public com.google.protobuf.ByteString getStartRow() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getStartRow() {
return startRow_;
}
/**
* optional bytes startRow = 1;
*/
- public Builder setStartRow(com.google.protobuf.ByteString value) {
+ public Builder setStartRow(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1017,7 +1017,7 @@ public final class ScannerMessage {
}
// optional bytes endRow = 2;
- private com.google.protobuf.ByteString endRow_ = com.google.protobuf.ByteString.EMPTY;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString endRow_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes endRow = 2;
*/
@@ -1027,13 +1027,13 @@ public final class ScannerMessage {
/**
* optional bytes endRow = 2;
*/
- public com.google.protobuf.ByteString getEndRow() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getEndRow() {
return endRow_;
}
/**
* optional bytes endRow = 2;
*/
- public Builder setEndRow(com.google.protobuf.ByteString value) {
+ public Builder setEndRow(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1076,14 +1076,14 @@ public final class ScannerMessage {
/**
* repeated bytes columns = 3;
*/
- public com.google.protobuf.ByteString getColumns(int index) {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getColumns(int index) {
return columns_.get(index);
}
/**
* repeated bytes columns = 3;
*/
public Builder setColumns(
- int index, com.google.protobuf.ByteString value) {
+ int index, org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1095,7 +1095,7 @@ public final class ScannerMessage {
/**
* repeated bytes columns = 3;
*/
- public Builder addColumns(com.google.protobuf.ByteString value) {
+ public Builder addColumns(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1108,7 +1108,7 @@ public final class ScannerMessage {
* repeated bytes columns = 3;
*/
public Builder addAllColumns(
- java.lang.Iterable extends com.google.protobuf.ByteString> values) {
+ java.lang.Iterable extends org.apache.hbase.shaded.com.google.protobuf.ByteString> values) {
ensureColumnsIsMutable();
super.addAll(values, columns_);
onChanged();
@@ -1270,7 +1270,7 @@ public final class ScannerMessage {
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ java.lang.String s = ((org.apache.hbase.shaded.com.google.protobuf.ByteString) ref)
.toStringUtf8();
filter_ = s;
return s;
@@ -1281,17 +1281,17 @@ public final class ScannerMessage {
/**
* optional string filter = 8;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
filter_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -1320,7 +1320,7 @@ public final class ScannerMessage {
* optional string filter = 8;
*/
public Builder setFilterBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1380,10 +1380,10 @@ public final class ScannerMessage {
}
// repeated string labels = 10;
- private com.google.protobuf.LazyStringList labels_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ private org.apache.hbase.shaded.com.google.protobuf.LazyStringList labels_ = org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureLabelsIsMutable() {
if (!((bitField0_ & 0x00000200) == 0x00000200)) {
- labels_ = new com.google.protobuf.LazyStringArrayList(labels_);
+ labels_ = new org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList(labels_);
bitField0_ |= 0x00000200;
}
}
@@ -1409,7 +1409,7 @@ public final class ScannerMessage {
/**
* repeated string labels = 10;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getLabelsBytes(int index) {
return labels_.getByteString(index);
}
@@ -1453,7 +1453,7 @@ public final class ScannerMessage {
* repeated string labels = 10;
*/
public Builder clearLabels() {
- labels_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ labels_ = org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000200);
onChanged();
return this;
@@ -1462,7 +1462,7 @@ public final class ScannerMessage {
* repeated string labels = 10;
*/
public Builder addLabelsBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1532,17 +1532,17 @@ public final class ScannerMessage {
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Scanner)
}
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_fieldAccessorTable;
- public static com.google.protobuf.Descriptors.FileDescriptor
+ public static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
- private static com.google.protobuf.Descriptors.FileDescriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
@@ -1554,23 +1554,23 @@ public final class ScannerMessage {
"\030\007 \001(\005\022\016\n\006filter\030\010 \001(\t\022\017\n\007caching\030\t \001(\005\022" +
"\016\n\006labels\030\n \003(\t\022\023\n\013cacheBlocks\030\013 \001(\010"
};
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor,
new java.lang.String[] { "StartRow", "EndRow", "Columns", "Batch", "StartTime", "EndTime", "MaxVersions", "Filter", "Caching", "Labels", "CacheBlocks", });
return null;
}
};
- com.google.protobuf.Descriptors.FileDescriptor
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java
index 44dce76..2733953 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java
@@ -6,10 +6,10 @@ package org.apache.hadoop.hbase.rest.protobuf.generated;
public final class StorageClusterStatusMessage {
private StorageClusterStatusMessage() {}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
}
public interface StorageClusterStatusOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;
/**
@@ -73,7 +73,7 @@ public final class StorageClusterStatusMessage {
/**
* repeated string deadNodes = 2;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getDeadNodesBytes(int index);
// optional int32 regions = 3;
@@ -118,14 +118,14 @@ public final class StorageClusterStatusMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus}
*/
public static final class StorageClusterStatus extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements StorageClusterStatusOrBuilder {
// Use StorageClusterStatus.newBuilder() to construct.
- private StorageClusterStatus(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private StorageClusterStatus(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private StorageClusterStatus(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private StorageClusterStatus(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final StorageClusterStatus defaultInstance;
public static StorageClusterStatus getDefaultInstance() {
@@ -136,20 +136,20 @@ public final class StorageClusterStatusMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private StorageClusterStatus(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -175,7 +175,7 @@ public final class StorageClusterStatusMessage {
}
case 18: {
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
- deadNodes_ = new com.google.protobuf.LazyStringArrayList();
+ deadNodes_ = new org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000002;
}
deadNodes_.add(input.readBytes());
@@ -198,51 +198,51 @@ public final class StorageClusterStatusMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
liveNodes_ = java.util.Collections.unmodifiableList(liveNodes_);
}
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
- deadNodes_ = new com.google.protobuf.UnmodifiableLazyStringList(deadNodes_);
+ deadNodes_ = new org.apache.hbase.shaded.com.google.protobuf.UnmodifiableLazyStringList(deadNodes_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.class, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Builder.class);
}
- public static com.google.protobuf.Parserrequired bytes name = 1;
*/
- com.google.protobuf.ByteString getName();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getName();
// optional int32 stores = 2;
/**
@@ -378,14 +378,14 @@ public final class StorageClusterStatusMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region}
*/
public static final class Region extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements RegionOrBuilder {
// Use Region.newBuilder() to construct.
- private Region(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private Region(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private Region(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private Region(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Region defaultInstance;
public static Region getDefaultInstance() {
@@ -396,20 +396,20 @@ public final class StorageClusterStatusMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Region(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -492,47 +492,47 @@ public final class StorageClusterStatusMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.class, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder.class);
}
- public static com.google.protobuf.Parserrequired bytes name = 1;
*/
@@ -542,7 +542,7 @@ public final class StorageClusterStatusMessage {
/**
* required bytes name = 1;
*/
- public com.google.protobuf.ByteString getName() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getName() {
return name_;
}
@@ -739,7 +739,7 @@ public final class StorageClusterStatusMessage {
}
private void initFields() {
- name_ = com.google.protobuf.ByteString.EMPTY;
+ name_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
stores_ = 0;
storefiles_ = 0;
storefileSizeMB_ = 0;
@@ -766,7 +766,7 @@ public final class StorageClusterStatusMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -818,55 +818,55 @@ public final class StorageClusterStatusMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, name_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(2, stores_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(3, storefiles_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(4, storefileSizeMB_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(5, memstoreSizeMB_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(6, storefileIndexSizeMB_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt64Size(7, readRequestsCount_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt64Size(8, writeRequestsCount_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(9, rootIndexSizeKB_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(10, totalStaticIndexSizeKB_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(11, totalStaticBloomSizeKB_);
}
if (((bitField0_ & 0x00000800) == 0x00000800)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt64Size(12, totalCompactingKVs_);
}
if (((bitField0_ & 0x00001000) == 0x00001000)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt64Size(13, currentCompactedKVs_);
}
size += getUnknownFields().getSerializedSize();
@@ -882,24 +882,24 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(java.io.InputStream input)
@@ -908,7 +908,7 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -918,18 +918,18 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -943,7 +943,7 @@ public final class StorageClusterStatusMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -951,14 +951,14 @@ public final class StorageClusterStatusMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrequired bytes name = 1;
*/
@@ -1188,13 +1188,13 @@ public final class StorageClusterStatusMessage {
/**
* required bytes name = 1;
*/
- public com.google.protobuf.ByteString getName() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getName() {
return name_;
}
/**
* required bytes name = 1;
*/
- public Builder setName(com.google.protobuf.ByteString value) {
+ public Builder setName(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1621,7 +1621,7 @@ public final class StorageClusterStatusMessage {
}
public interface NodeOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// required string name = 1;
/**
@@ -1647,7 +1647,7 @@ public final class StorageClusterStatusMessage {
* name:port
*
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes();
// optional int64 startCode = 2;
@@ -1719,14 +1719,14 @@ public final class StorageClusterStatusMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node}
*/
public static final class Node extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements NodeOrBuilder {
// Use Node.newBuilder() to construct.
- private Node(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private Node(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private Node(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private Node(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Node defaultInstance;
public static Node getDefaultInstance() {
@@ -1737,20 +1737,20 @@ public final class StorageClusterStatusMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Node(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -1801,10 +1801,10 @@ public final class StorageClusterStatusMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
@@ -1814,30 +1814,30 @@ public final class StorageClusterStatusMessage {
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.class, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.Builder.class);
}
- public static com.google.protobuf.Parserrepeated string deadNodes = 2;
*/
@@ -2927,7 +2927,7 @@ public final class StorageClusterStatusMessage {
/**
* repeated string deadNodes = 2;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getDeadNodesBytes(int index) {
return deadNodes_.getByteString(index);
}
@@ -2990,7 +2990,7 @@ public final class StorageClusterStatusMessage {
private void initFields() {
liveNodes_ = java.util.Collections.emptyList();
- deadNodes_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ deadNodes_ = org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
regions_ = 0;
requests_ = 0L;
averageLoad_ = 0D;
@@ -3010,7 +3010,7 @@ public final class StorageClusterStatusMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < liveNodes_.size(); i++) {
@@ -3038,28 +3038,28 @@ public final class StorageClusterStatusMessage {
size = 0;
for (int i = 0; i < liveNodes_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(1, liveNodes_.get(i));
}
{
int dataSize = 0;
for (int i = 0; i < deadNodes_.size(); i++) {
- dataSize += com.google.protobuf.CodedOutputStream
+ dataSize += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(deadNodes_.getByteString(i));
}
size += dataSize;
size += 1 * getDeadNodesList().size();
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt32Size(3, regions_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt64Size(4, requests_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeDoubleSize(5, averageLoad_);
}
size += getUnknownFields().getSerializedSize();
@@ -3075,24 +3075,24 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(java.io.InputStream input)
@@ -3101,7 +3101,7 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -3111,18 +3111,18 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -3136,7 +3136,7 @@ public final class StorageClusterStatusMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -3144,14 +3144,14 @@ public final class StorageClusterStatusMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrepeated string deadNodes = 2;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getDeadNodesBytes(int index) {
return deadNodes_.getByteString(index);
}
@@ -3728,7 +3728,7 @@ public final class StorageClusterStatusMessage {
* repeated string deadNodes = 2;
*/
public Builder clearDeadNodes() {
- deadNodes_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ deadNodes_ = org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
@@ -3737,7 +3737,7 @@ public final class StorageClusterStatusMessage {
* repeated string deadNodes = 2;
*/
public Builder addDeadNodesBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -3873,27 +3873,27 @@ public final class StorageClusterStatusMessage {
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus)
}
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable;
- public static com.google.protobuf.Descriptors.FileDescriptor
+ public static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
- private static com.google.protobuf.Descriptors.FileDescriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
@@ -3919,35 +3919,35 @@ public final class StorageClusterStatusMessage {
".hbase.rest.protobuf.generated.StorageCl",
"usterStatus.Region"
};
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor,
new java.lang.String[] { "LiveNodes", "DeadNodes", "Regions", "Requests", "AverageLoad", });
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_descriptor =
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_descriptor,
new java.lang.String[] { "Name", "Stores", "Storefiles", "StorefileSizeMB", "MemstoreSizeMB", "StorefileIndexSizeMB", "ReadRequestsCount", "WriteRequestsCount", "RootIndexSizeKB", "TotalStaticIndexSizeKB", "TotalStaticBloomSizeKB", "TotalCompactingKVs", "CurrentCompactedKVs", });
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_descriptor =
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor.getNestedTypes().get(1);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_descriptor,
new java.lang.String[] { "Name", "StartCode", "Requests", "HeapSizeMB", "MaxHeapSizeMB", "Regions", });
return null;
}
};
- com.google.protobuf.Descriptors.FileDescriptor
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java
index 421c0ec..52dff35 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java
@@ -6,10 +6,10 @@ package org.apache.hadoop.hbase.rest.protobuf.generated;
public final class TableInfoMessage {
private TableInfoMessage() {}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
}
public interface TableInfoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// required string name = 1;
/**
@@ -23,7 +23,7 @@ public final class TableInfoMessage {
/**
* required string name = 1;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes();
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region regions = 2;
@@ -55,14 +55,14 @@ public final class TableInfoMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo}
*/
public static final class TableInfo extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements TableInfoOrBuilder {
// Use TableInfo.newBuilder() to construct.
- private TableInfo(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private TableInfo(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private TableInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private TableInfo(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final TableInfo defaultInstance;
public static TableInfo getDefaultInstance() {
@@ -73,20 +73,20 @@ public final class TableInfoMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TableInfo(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -117,10 +117,10 @@ public final class TableInfoMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
@@ -130,35 +130,35 @@ public final class TableInfoMessage {
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Builder.class);
}
- public static com.google.protobuf.Parserrequired string name = 1;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes();
// optional bytes startKey = 2;
@@ -183,7 +183,7 @@ public final class TableInfoMessage {
/**
* optional bytes startKey = 2;
*/
- com.google.protobuf.ByteString getStartKey();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getStartKey();
// optional bytes endKey = 3;
/**
@@ -193,7 +193,7 @@ public final class TableInfoMessage {
/**
* optional bytes endKey = 3;
*/
- com.google.protobuf.ByteString getEndKey();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getEndKey();
// optional int64 id = 4;
/**
@@ -217,21 +217,21 @@ public final class TableInfoMessage {
/**
* optional string location = 5;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getLocationBytes();
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region}
*/
public static final class Region extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements RegionOrBuilder {
// Use Region.newBuilder() to construct.
- private Region(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private Region(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private Region(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private Region(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Region defaultInstance;
public static Region getDefaultInstance() {
@@ -242,20 +242,20 @@ public final class TableInfoMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Region(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -298,40 +298,40 @@ public final class TableInfoMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder.class);
}
- public static com.google.protobuf.Parserrequired string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
// optional bytes startKey = 2;
public static final int STARTKEY_FIELD_NUMBER = 2;
- private com.google.protobuf.ByteString startKey_;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString startKey_;
/**
* optional bytes startKey = 2;
*/
@@ -391,13 +391,13 @@ public final class TableInfoMessage {
/**
* optional bytes startKey = 2;
*/
- public com.google.protobuf.ByteString getStartKey() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getStartKey() {
return startKey_;
}
// optional bytes endKey = 3;
public static final int ENDKEY_FIELD_NUMBER = 3;
- private com.google.protobuf.ByteString endKey_;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString endKey_;
/**
* optional bytes endKey = 3;
*/
@@ -407,7 +407,7 @@ public final class TableInfoMessage {
/**
* optional bytes endKey = 3;
*/
- public com.google.protobuf.ByteString getEndKey() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getEndKey() {
return endKey_;
}
@@ -444,8 +444,8 @@ public final class TableInfoMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
location_ = s;
@@ -456,24 +456,24 @@ public final class TableInfoMessage {
/**
* optional string location = 5;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getLocationBytes() {
java.lang.Object ref = location_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
location_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
name_ = "";
- startKey_ = com.google.protobuf.ByteString.EMPTY;
- endKey_ = com.google.protobuf.ByteString.EMPTY;
+ startKey_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
+ endKey_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
id_ = 0L;
location_ = "";
}
@@ -490,7 +490,7 @@ public final class TableInfoMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -518,23 +518,23 @@ public final class TableInfoMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(2, startKey_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(3, endKey_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeInt64Size(4, id_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(5, getLocationBytes());
}
size += getUnknownFields().getSerializedSize();
@@ -550,24 +550,24 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(java.io.InputStream input)
@@ -576,7 +576,7 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -586,18 +586,18 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -611,7 +611,7 @@ public final class TableInfoMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -619,14 +619,14 @@ public final class TableInfoMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrequired string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -841,7 +841,7 @@ public final class TableInfoMessage {
* required string name = 1;
*/
public Builder setNameBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -852,7 +852,7 @@ public final class TableInfoMessage {
}
// optional bytes startKey = 2;
- private com.google.protobuf.ByteString startKey_ = com.google.protobuf.ByteString.EMPTY;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString startKey_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes startKey = 2;
*/
@@ -862,13 +862,13 @@ public final class TableInfoMessage {
/**
* optional bytes startKey = 2;
*/
- public com.google.protobuf.ByteString getStartKey() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getStartKey() {
return startKey_;
}
/**
* optional bytes startKey = 2;
*/
- public Builder setStartKey(com.google.protobuf.ByteString value) {
+ public Builder setStartKey(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -888,7 +888,7 @@ public final class TableInfoMessage {
}
// optional bytes endKey = 3;
- private com.google.protobuf.ByteString endKey_ = com.google.protobuf.ByteString.EMPTY;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString endKey_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes endKey = 3;
*/
@@ -898,13 +898,13 @@ public final class TableInfoMessage {
/**
* optional bytes endKey = 3;
*/
- public com.google.protobuf.ByteString getEndKey() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getEndKey() {
return endKey_;
}
/**
* optional bytes endKey = 3;
*/
- public Builder setEndKey(com.google.protobuf.ByteString value) {
+ public Builder setEndKey(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -970,7 +970,7 @@ public final class TableInfoMessage {
public java.lang.String getLocation() {
java.lang.Object ref = location_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ java.lang.String s = ((org.apache.hbase.shaded.com.google.protobuf.ByteString) ref)
.toStringUtf8();
location_ = s;
return s;
@@ -981,17 +981,17 @@ public final class TableInfoMessage {
/**
* optional string location = 5;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getLocationBytes() {
java.lang.Object ref = location_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
location_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -1020,7 +1020,7 @@ public final class TableInfoMessage {
* optional string location = 5;
*/
public Builder setLocationBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1059,8 +1059,8 @@ public final class TableInfoMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
name_ = s;
@@ -1071,17 +1071,17 @@ public final class TableInfoMessage {
/**
* required string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -1144,7 +1144,7 @@ public final class TableInfoMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -1163,11 +1163,11 @@ public final class TableInfoMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getNameBytes());
}
for (int i = 0; i < regions_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(2, regions_.get(i));
}
size += getUnknownFields().getSerializedSize();
@@ -1183,24 +1183,24 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(java.io.InputStream input)
@@ -1209,7 +1209,7 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -1219,18 +1219,18 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -1244,7 +1244,7 @@ public final class TableInfoMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -1252,14 +1252,14 @@ public final class TableInfoMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrequired string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -1484,7 +1484,7 @@ public final class TableInfoMessage {
* required string name = 1;
*/
public Builder setNameBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1504,7 +1504,7 @@ public final class TableInfoMessage {
}
}
- private com.google.protobuf.RepeatedFieldBuilder<
+ private org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.RegionOrBuilder> regionsBuilder_;
/**
@@ -1719,11 +1719,11 @@ public final class TableInfoMessage {
getRegionsBuilderList() {
return getRegionsFieldBuilder().getBuilderList();
}
- private com.google.protobuf.RepeatedFieldBuilder<
+ private org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.RegionOrBuilder>
getRegionsFieldBuilder() {
if (regionsBuilder_ == null) {
- regionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ regionsBuilder_ = new org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.RegionOrBuilder>(
regions_,
((bitField0_ & 0x00000002) == 0x00000002),
@@ -1745,22 +1745,22 @@ public final class TableInfoMessage {
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo)
}
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_fieldAccessorTable;
- public static com.google.protobuf.Descriptors.FileDescriptor
+ public static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
- private static com.google.protobuf.Descriptors.FileDescriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
@@ -1772,29 +1772,29 @@ public final class TableInfoMessage {
"ame\030\001 \002(\t\022\020\n\010startKey\030\002 \001(\014\022\016\n\006endKey\030\003 " +
"\001(\014\022\n\n\002id\030\004 \001(\003\022\020\n\010location\030\005 \001(\t"
};
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor,
new java.lang.String[] { "Name", "Regions", });
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor =
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor,
new java.lang.String[] { "Name", "StartKey", "EndKey", "Id", "Location", });
return null;
}
};
- com.google.protobuf.Descriptors.FileDescriptor
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableListMessage.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableListMessage.java
index 20891a2..a794e42 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableListMessage.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableListMessage.java
@@ -6,10 +6,10 @@ package org.apache.hadoop.hbase.rest.protobuf.generated;
public final class TableListMessage {
private TableListMessage() {}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
}
public interface TableListOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// repeated string name = 1;
/**
@@ -28,21 +28,21 @@ public final class TableListMessage {
/**
* repeated string name = 1;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes(int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableList}
*/
public static final class TableList extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements TableListOrBuilder {
// Use TableList.newBuilder() to construct.
- private TableList(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private TableList(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private TableList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private TableList(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final TableList defaultInstance;
public static TableList getDefaultInstance() {
@@ -53,20 +53,20 @@ public final class TableListMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TableList(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -84,7 +84,7 @@ public final class TableListMessage {
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
- name_ = new com.google.protobuf.LazyStringArrayList();
+ name_ = new org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
name_.add(input.readBytes());
@@ -92,49 +92,49 @@ public final class TableListMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
- name_ = new com.google.protobuf.UnmodifiableLazyStringList(name_);
+ name_ = new org.apache.hbase.shaded.com.google.protobuf.UnmodifiableLazyStringList(name_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
}
- public static com.google.protobuf.Parserrepeated string name = 1;
*/
@@ -157,13 +157,13 @@ public final class TableListMessage {
/**
* repeated string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes(int index) {
return name_.getByteString(index);
}
private void initFields() {
- name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ name_ = org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -174,7 +174,7 @@ public final class TableListMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < name_.size(); i++) {
@@ -192,7 +192,7 @@ public final class TableListMessage {
{
int dataSize = 0;
for (int i = 0; i < name_.size(); i++) {
- dataSize += com.google.protobuf.CodedOutputStream
+ dataSize += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(name_.getByteString(i));
}
size += dataSize;
@@ -211,24 +211,24 @@ public final class TableListMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(java.io.InputStream input)
@@ -237,7 +237,7 @@ public final class TableListMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -247,18 +247,18 @@ public final class TableListMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -272,7 +272,7 @@ public final class TableListMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -280,14 +280,14 @@ public final class TableListMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableList}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrepeated string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes(int index) {
return name_.getByteString(index);
}
@@ -475,7 +475,7 @@ public final class TableListMessage {
* repeated string name = 1;
*/
public Builder clearName() {
- name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ name_ = org.apache.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
@@ -484,7 +484,7 @@ public final class TableListMessage {
* repeated string name = 1;
*/
public Builder addNameBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -505,17 +505,17 @@ public final class TableListMessage {
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableList)
}
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
- public static com.google.protobuf.Descriptors.FileDescriptor
+ public static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
- private static com.google.protobuf.Descriptors.FileDescriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
@@ -523,23 +523,23 @@ public final class TableListMessage {
"oop.hbase.rest.protobuf.generated\"\031\n\tTab" +
"leList\022\014\n\004name\030\001 \003(\t"
};
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor,
new java.lang.String[] { "Name", });
return null;
}
};
- com.google.protobuf.Descriptors.FileDescriptor
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java
index 88f9cd3..6ff9223 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java
@@ -6,10 +6,10 @@ package org.apache.hadoop.hbase.rest.protobuf.generated;
public final class TableSchemaMessage {
private TableSchemaMessage() {}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
}
public interface TableSchemaOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// optional string name = 1;
/**
@@ -23,7 +23,7 @@ public final class TableSchemaMessage {
/**
* optional string name = 1;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes();
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute attrs = 2;
@@ -108,14 +108,14 @@ public final class TableSchemaMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema}
*/
public static final class TableSchema extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements TableSchemaOrBuilder {
// Use TableSchema.newBuilder() to construct.
- private TableSchema(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private TableSchema(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private TableSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private TableSchema(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final TableSchema defaultInstance;
public static TableSchema getDefaultInstance() {
@@ -126,20 +126,20 @@ public final class TableSchemaMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TableSchema(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -188,10 +188,10 @@ public final class TableSchemaMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
@@ -204,35 +204,35 @@ public final class TableSchemaMessage {
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Builder.class);
}
- public static com.google.protobuf.Parserrequired string name = 1;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes();
// required string value = 2;
@@ -261,21 +261,21 @@ public final class TableSchemaMessage {
/**
* required string value = 2;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getValueBytes();
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute}
*/
public static final class Attribute extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements AttributeOrBuilder {
// Use Attribute.newBuilder() to construct.
- private Attribute(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private Attribute(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private Attribute(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private Attribute(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Attribute defaultInstance;
public static Attribute getDefaultInstance() {
@@ -286,20 +286,20 @@ public final class TableSchemaMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Attribute(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -327,40 +327,40 @@ public final class TableSchemaMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder.class);
}
- public static com.google.protobuf.Parserrequired string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -425,8 +425,8 @@ public final class TableSchemaMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
value_ = s;
@@ -437,17 +437,17 @@ public final class TableSchemaMessage {
/**
* required string value = 2;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -472,7 +472,7 @@ public final class TableSchemaMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -491,11 +491,11 @@ public final class TableSchemaMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getValueBytes());
}
size += getUnknownFields().getSerializedSize();
@@ -511,24 +511,24 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(java.io.InputStream input)
@@ -537,7 +537,7 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -547,18 +547,18 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -572,7 +572,7 @@ public final class TableSchemaMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -580,14 +580,14 @@ public final class TableSchemaMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrequired string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -779,7 +779,7 @@ public final class TableSchemaMessage {
* required string name = 1;
*/
public Builder setNameBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -803,7 +803,7 @@ public final class TableSchemaMessage {
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ java.lang.String s = ((org.apache.hbase.shaded.com.google.protobuf.ByteString) ref)
.toStringUtf8();
value_ = s;
return s;
@@ -814,17 +814,17 @@ public final class TableSchemaMessage {
/**
* required string value = 2;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -853,7 +853,7 @@ public final class TableSchemaMessage {
* required string value = 2;
*/
public Builder setValueBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -892,8 +892,8 @@ public final class TableSchemaMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
name_ = s;
@@ -904,17 +904,17 @@ public final class TableSchemaMessage {
/**
* optional string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -1058,7 +1058,7 @@ public final class TableSchemaMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -1086,23 +1086,23 @@ public final class TableSchemaMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getNameBytes());
}
for (int i = 0; i < attrs_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(2, attrs_.get(i));
}
for (int i = 0; i < columns_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(3, columns_.get(i));
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBoolSize(4, inMemory_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBoolSize(5, readOnly_);
}
size += getUnknownFields().getSerializedSize();
@@ -1118,24 +1118,24 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(java.io.InputStream input)
@@ -1144,7 +1144,7 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -1154,18 +1154,18 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -1179,7 +1179,7 @@ public final class TableSchemaMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -1187,14 +1187,14 @@ public final class TableSchemaMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderoptional string name = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -1481,7 +1481,7 @@ public final class TableSchemaMessage {
* optional string name = 1;
*/
public Builder setNameBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1501,7 +1501,7 @@ public final class TableSchemaMessage {
}
}
- private com.google.protobuf.RepeatedFieldBuilder<
+ private org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.AttributeOrBuilder> attrsBuilder_;
/**
@@ -1716,11 +1716,11 @@ public final class TableSchemaMessage {
getAttrsBuilderList() {
return getAttrsFieldBuilder().getBuilderList();
}
- private com.google.protobuf.RepeatedFieldBuilder<
+ private org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.AttributeOrBuilder>
getAttrsFieldBuilder() {
if (attrsBuilder_ == null) {
- attrsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ attrsBuilder_ = new org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.AttributeOrBuilder>(
attrs_,
((bitField0_ & 0x00000002) == 0x00000002),
@@ -1741,7 +1741,7 @@ public final class TableSchemaMessage {
}
}
- private com.google.protobuf.RepeatedFieldBuilder<
+ private org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchemaOrBuilder> columnsBuilder_;
/**
@@ -1956,11 +1956,11 @@ public final class TableSchemaMessage {
getColumnsBuilderList() {
return getColumnsFieldBuilder().getBuilderList();
}
- private com.google.protobuf.RepeatedFieldBuilder<
+ private org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchemaOrBuilder>
getColumnsFieldBuilder() {
if (columnsBuilder_ == null) {
- columnsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ columnsBuilder_ = new org.apache.hbase.shaded.com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchemaOrBuilder>(
columns_,
((bitField0_ & 0x00000004) == 0x00000004),
@@ -2064,22 +2064,22 @@ public final class TableSchemaMessage {
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema)
}
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_fieldAccessorTable;
- public static com.google.protobuf.Descriptors.FileDescriptor
+ public static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
- private static com.google.protobuf.Descriptors.FileDescriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
@@ -2094,29 +2094,29 @@ public final class TableSchemaMessage {
"\010readOnly\030\005 \001(\010\032(\n\tAttribute\022\014\n\004name\030\001 \002" +
"(\t\022\r\n\005value\030\002 \002(\t"
};
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor,
new java.lang.String[] { "Name", "Attrs", "Columns", "InMemory", "ReadOnly", });
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor =
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor.getNestedTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor,
new java.lang.String[] { "Name", "Value", });
return null;
}
};
- com.google.protobuf.Descriptors.FileDescriptor
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.getDescriptor(),
}, assigner);
}
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java
index 590b0d3..8f3391b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java
@@ -6,10 +6,10 @@ package org.apache.hadoop.hbase.rest.protobuf.generated;
public final class VersionMessage {
private VersionMessage() {}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
}
public interface VersionOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// optional string restVersion = 1;
/**
@@ -23,7 +23,7 @@ public final class VersionMessage {
/**
* optional string restVersion = 1;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getRestVersionBytes();
// optional string jvmVersion = 2;
@@ -38,7 +38,7 @@ public final class VersionMessage {
/**
* optional string jvmVersion = 2;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getJvmVersionBytes();
// optional string osVersion = 3;
@@ -53,7 +53,7 @@ public final class VersionMessage {
/**
* optional string osVersion = 3;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getOsVersionBytes();
// optional string serverVersion = 4;
@@ -68,7 +68,7 @@ public final class VersionMessage {
/**
* optional string serverVersion = 4;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getServerVersionBytes();
// optional string jerseyVersion = 5;
@@ -83,21 +83,21 @@ public final class VersionMessage {
/**
* optional string jerseyVersion = 5;
*/
- com.google.protobuf.ByteString
+ org.apache.hbase.shaded.com.google.protobuf.ByteString
getJerseyVersionBytes();
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Version}
*/
public static final class Version extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements VersionOrBuilder {
// Use Version.newBuilder() to construct.
- private Version(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private Version(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private Version(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private Version(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Version defaultInstance;
public static Version getDefaultInstance() {
@@ -108,20 +108,20 @@ public final class VersionMessage {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Version(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -164,40 +164,40 @@ public final class VersionMessage {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version.class, org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version.Builder.class);
}
- public static com.google.protobuf.Parseroptional string restVersion = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getRestVersionBytes() {
java.lang.Object ref = restVersion_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
restVersion_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -262,8 +262,8 @@ public final class VersionMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
jvmVersion_ = s;
@@ -274,17 +274,17 @@ public final class VersionMessage {
/**
* optional string jvmVersion = 2;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getJvmVersionBytes() {
java.lang.Object ref = jvmVersion_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
jvmVersion_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -305,8 +305,8 @@ public final class VersionMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
osVersion_ = s;
@@ -317,17 +317,17 @@ public final class VersionMessage {
/**
* optional string osVersion = 3;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getOsVersionBytes() {
java.lang.Object ref = osVersion_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
osVersion_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -348,8 +348,8 @@ public final class VersionMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
serverVersion_ = s;
@@ -360,17 +360,17 @@ public final class VersionMessage {
/**
* optional string serverVersion = 4;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getServerVersionBytes() {
java.lang.Object ref = serverVersion_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serverVersion_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -391,8 +391,8 @@ public final class VersionMessage {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
jerseyVersion_ = s;
@@ -403,17 +403,17 @@ public final class VersionMessage {
/**
* optional string jerseyVersion = 5;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getJerseyVersionBytes() {
java.lang.Object ref = jerseyVersion_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
jerseyVersion_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -433,7 +433,7 @@ public final class VersionMessage {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -461,23 +461,23 @@ public final class VersionMessage {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getRestVersionBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getJvmVersionBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getOsVersionBytes());
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(4, getServerVersionBytes());
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(5, getJerseyVersionBytes());
}
size += getUnknownFields().getSerializedSize();
@@ -493,24 +493,24 @@ public final class VersionMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(java.io.InputStream input)
@@ -519,7 +519,7 @@ public final class VersionMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -529,18 +529,18 @@ public final class VersionMessage {
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -554,7 +554,7 @@ public final class VersionMessage {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -562,14 +562,14 @@ public final class VersionMessage {
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Version}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderoptional string restVersion = 1;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getRestVersionBytes() {
java.lang.Object ref = restVersion_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
restVersion_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -786,7 +786,7 @@ public final class VersionMessage {
* optional string restVersion = 1;
*/
public Builder setRestVersionBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -810,7 +810,7 @@ public final class VersionMessage {
public java.lang.String getJvmVersion() {
java.lang.Object ref = jvmVersion_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ java.lang.String s = ((org.apache.hbase.shaded.com.google.protobuf.ByteString) ref)
.toStringUtf8();
jvmVersion_ = s;
return s;
@@ -821,17 +821,17 @@ public final class VersionMessage {
/**
* optional string jvmVersion = 2;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getJvmVersionBytes() {
java.lang.Object ref = jvmVersion_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
jvmVersion_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -860,7 +860,7 @@ public final class VersionMessage {
* optional string jvmVersion = 2;
*/
public Builder setJvmVersionBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -884,7 +884,7 @@ public final class VersionMessage {
public java.lang.String getOsVersion() {
java.lang.Object ref = osVersion_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ java.lang.String s = ((org.apache.hbase.shaded.com.google.protobuf.ByteString) ref)
.toStringUtf8();
osVersion_ = s;
return s;
@@ -895,17 +895,17 @@ public final class VersionMessage {
/**
* optional string osVersion = 3;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getOsVersionBytes() {
java.lang.Object ref = osVersion_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
osVersion_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -934,7 +934,7 @@ public final class VersionMessage {
* optional string osVersion = 3;
*/
public Builder setOsVersionBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -958,7 +958,7 @@ public final class VersionMessage {
public java.lang.String getServerVersion() {
java.lang.Object ref = serverVersion_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ java.lang.String s = ((org.apache.hbase.shaded.com.google.protobuf.ByteString) ref)
.toStringUtf8();
serverVersion_ = s;
return s;
@@ -969,17 +969,17 @@ public final class VersionMessage {
/**
* optional string serverVersion = 4;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getServerVersionBytes() {
java.lang.Object ref = serverVersion_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serverVersion_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -1008,7 +1008,7 @@ public final class VersionMessage {
* optional string serverVersion = 4;
*/
public Builder setServerVersionBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1032,7 +1032,7 @@ public final class VersionMessage {
public java.lang.String getJerseyVersion() {
java.lang.Object ref = jerseyVersion_;
if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ java.lang.String s = ((org.apache.hbase.shaded.com.google.protobuf.ByteString) ref)
.toStringUtf8();
jerseyVersion_ = s;
return s;
@@ -1043,17 +1043,17 @@ public final class VersionMessage {
/**
* optional string jerseyVersion = 5;
*/
- public com.google.protobuf.ByteString
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString
getJerseyVersionBytes() {
java.lang.Object ref = jerseyVersion_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
jerseyVersion_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -1082,7 +1082,7 @@ public final class VersionMessage {
* optional string jerseyVersion = 5;
*/
public Builder setJerseyVersionBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -1103,17 +1103,17 @@ public final class VersionMessage {
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Version)
}
- private static com.google.protobuf.Descriptors.Descriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_descriptor;
private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_fieldAccessorTable;
- public static com.google.protobuf.Descriptors.FileDescriptor
+ public static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
- private static com.google.protobuf.Descriptors.FileDescriptor
+ private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
@@ -1123,23 +1123,23 @@ public final class VersionMessage {
"\001(\t\022\021\n\tosVersion\030\003 \001(\t\022\025\n\rserverVersion\030" +
"\004 \001(\t\022\025\n\rjerseyVersion\030\005 \001(\t"
};
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_descriptor,
new java.lang.String[] { "RestVersion", "JvmVersion", "OsVersion", "ServerVersion", "JerseyVersion", });
return null;
}
};
- com.google.protobuf.Descriptors.FileDescriptor
+ org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
+ new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java
index a7f14f7..7c3350d 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.rsgroup;
import com.google.common.collect.Sets;
import com.google.common.net.HostAndPort;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
import java.io.IOException;
import java.util.ArrayList;
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
index 22bad72..6628abe 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
@@ -22,9 +22,9 @@ package org.apache.hadoop.hbase.rsgroup;
import com.google.common.collect.Sets;
import com.google.common.net.HostAndPort;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
import java.io.IOException;
import java.util.HashSet;
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
index 01efefc..c97033f 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
@@ -25,7 +25,7 @@ import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.net.HostAndPort;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
import java.io.IOException;
import java.util.ArrayList;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
index 7865cc0..9097698 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
@@ -37,10 +37,10 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.io.MultipleIOException;
-import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.Message;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* A wrapper for HTable. Can be used to restrict privilege.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java
index 5d3cbc2..58637f1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest;
import org.apache.hadoop.hbase.regionserver.RowProcessor;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
/**
* Convenience class that is used to make RowProcessorEndpoint invocations.
* For example usage, refer TestRowProcessorEndpoint
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index cc78626..9c53374 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -39,11 +39,11 @@ import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRespo
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
* A concrete AggregateProtocol implementation. Its system level coprocessor
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
index ab5fc78..b413123 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
@@ -34,11 +34,11 @@ import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcesso
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RowProcessor;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
* This class demonstrates how to implement atomic read-modify-writes
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java
index caf6a14..19f1d27 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.coprocessor;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java
index 1076437..68bcf94 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java
@@ -26,8 +26,8 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import com.google.protobuf.Message;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
* Coprocessors implement this interface to observe and mediate endpoint invocations
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
index e771a92..3750a8a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
@@ -41,9 +41,9 @@ import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateR
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
* This class demonstrates how to implement atomic multi row transactions using
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java
index 88db6b6..98cd444 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.coprocessor;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java
index 2224414..3429784 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExc
import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage;
import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/**
* A ForeignException is an exception from another thread or process.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java
index a9cf0f1..e609973 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
/**
* The request processing logic, which is usually executed in thread pools provided by an
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java
index f56bf6f..42a12e7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.ipc;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
index e0203ab..f48fad1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;
import org.apache.hadoop.hbase.util.ReflectionUtils;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
/**
* RPC Executor that uses different queues for reads and writes.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index f0aed2e..34ba767 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -131,13 +131,13 @@ import org.apache.htrace.TraceInfo;
import org.codehaus.jackson.map.ObjectMapper;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import com.google.protobuf.BlockingService;
-import com.google.protobuf.CodedInputStream;
-import com.google.protobuf.CodedOutputStream;
-import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.Message;
-import com.google.protobuf.ServiceException;
-import com.google.protobuf.TextFormat;
+import org.apache.hbase.shaded.com.google.protobuf.BlockingService;
+import org.apache.hbase.shaded.com.google.protobuf.CodedInputStream;
+import org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.TextFormat;
/**
* An RPC server that hosts protobuf described Services.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java
index ab8b485..f900e62 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java
@@ -31,10 +31,10 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.security.authorize.PolicyProvider;
import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.BlockingService;
-import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.Message;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.BlockingService;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
@InterfaceStability.Evolving
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index 37e4e44..f17bcbd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -18,7 +18,7 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import com.codahale.metrics.MetricRegistry;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -798,7 +798,7 @@ public class TableMapReduceUtil {
// pull necessary dependencies
org.apache.zookeeper.ZooKeeper.class,
io.netty.channel.Channel.class,
- com.google.protobuf.Message.class,
+ org.apache.hbase.shaded.com.google.protobuf.Message.class,
com.google.common.collect.Lists.class,
org.apache.htrace.Trace.class,
com.codahale.metrics.MetricRegistry.class);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
index a21edcc..eda50ef 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.mob.ExpiredMobFileCleaner;
import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.mob.MobUtils;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* The Class ExpiredMobFileCleanerChore for running cleaner regularly to remove the expired
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 3b5af42..f5f6bba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.master;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
import java.io.IOException;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
index 1e6dade..a3bdd81 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.master;
-import com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 6ee022f..1332c9e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -95,13 +95,13 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.zookeeper.KeeperException;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
+import org.apache.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors;
+import org.apache.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.ServiceException;
/**
* Implements the master RPC services.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
index d6802fe..46f6db1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.quotas.MasterQuotaManager;
import org.apache.hadoop.hbase.security.User;
-import com.google.protobuf.Service;
+import org.apache.hbase.shaded.com.google.protobuf.Service;
/**
* Services Master supplies
@@ -263,7 +263,7 @@ public interface MasterServices extends Server {
*
* required bytes family = 1;
*/
- com.google.protobuf.ByteString getFamily();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getFamily();
// optional bytes qualifier = 2;
/**
@@ -29,20 +29,20 @@ public final class ColumnAggregationProtos {
/**
* optional bytes qualifier = 2;
*/
- com.google.protobuf.ByteString getQualifier();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getQualifier();
}
/**
* Protobuf type {@code SumRequest}
*/
public static final class SumRequest extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements SumRequestOrBuilder {
// Use SumRequest.newBuilder() to construct.
- private SumRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ private SumRequest(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private SumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private SumRequest(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SumRequest defaultInstance;
public static SumRequest getDefaultInstance() {
@@ -53,20 +53,20 @@ public final class ColumnAggregationProtos {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SumRequest(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -94,47 +94,47 @@ public final class ColumnAggregationProtos {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class);
}
- public static com.google.protobuf.Parser<SumRequest> PARSER =
+ public static org.apache.hbase.shaded.com.google.protobuf.Parser<SumRequest> PARSER =
/**
* required bytes family = 1;
*/
@@ -144,13 +144,13 @@ public final class ColumnAggregationProtos {
/**
* required bytes family = 1;
*/
- public com.google.protobuf.ByteString getFamily() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getFamily() {
return family_;
}
// optional bytes qualifier = 2;
public static final int QUALIFIER_FIELD_NUMBER = 2;
- private com.google.protobuf.ByteString qualifier_;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString qualifier_;
/**
* optional bytes qualifier = 2;
*/
@@ -160,13 +160,13 @@ public final class ColumnAggregationProtos {
/**
* optional bytes qualifier = 2;
*/
- public com.google.protobuf.ByteString getQualifier() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
private void initFields() {
- family_ = com.google.protobuf.ByteString.EMPTY;
- qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ family_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
+ qualifier_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -181,7 +181,7 @@ public final class ColumnAggregationProtos {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -200,11 +200,11 @@ public final class ColumnAggregationProtos {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, family_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(2, qualifier_);
}
size += getUnknownFields().getSerializedSize();
@@ -267,24 +267,24 @@ public final class ColumnAggregationProtos {
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(java.io.InputStream input)
@@ -293,7 +293,7 @@ public final class ColumnAggregationProtos {
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -303,18 +303,18 @@ public final class ColumnAggregationProtos {
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -328,7 +328,7 @@ public final class ColumnAggregationProtos {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -336,14 +336,14 @@ public final class ColumnAggregationProtos {
* Protobuf type {@code SumRequest}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrequired bytes family = 1;
*/
@@ -474,13 +474,13 @@ public final class ColumnAggregationProtos {
/**
* required bytes family = 1;
*/
- public com.google.protobuf.ByteString getFamily() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getFamily() {
return family_;
}
/**
* required bytes family = 1;
*/
- public Builder setFamily(com.google.protobuf.ByteString value) {
+ public Builder setFamily(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -500,7 +500,7 @@ public final class ColumnAggregationProtos {
}
// optional bytes qualifier = 2;
- private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString qualifier_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes qualifier = 2;
*/
@@ -510,13 +510,13 @@ public final class ColumnAggregationProtos {
/**
* optional bytes qualifier = 2;
*/
- public com.google.protobuf.ByteString getQualifier() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
/**
* optional bytes qualifier = 2;
*/
- public Builder setQualifier(com.google.protobuf.ByteString value) {
+ public Builder setQualifier(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -547,7 +547,7 @@ public final class ColumnAggregationProtos {
}
public interface SumResponseOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// required int64 sum = 1;
/**
@@ -563,14 +563,14 @@ public final class ColumnAggregationProtos {
* Protobuf type {@code SumResponse}
*/
public static final class SumResponse extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements SumResponseOrBuilder {
// Use SumResponse.newBuilder() to construct.
- private SumResponse(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private SumResponse(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private SumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private SumResponse(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SumResponse defaultInstance;
public static SumResponse getDefaultInstance() {
@@ -581,20 +581,20 @@ public final class ColumnAggregationProtos {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SumResponse(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -617,40 +617,40 @@ public final class ColumnAggregationProtos {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class);
}
- public static com.google.protobuf.Parserrpc sum(.SumRequest) returns (.SumResponse);
*/
public abstract void sum(
- com.google.protobuf.RpcController controller,
+ org.apache.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request,
- com.google.protobuf.RpcCallbackrpc sum(.SumRequest) returns (.SumResponse);
*/
public abstract void sum(
- com.google.protobuf.RpcController controller,
+ org.apache.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request,
- com.google.protobuf.RpcCallbackrequired bytes family = 1;
*/
- com.google.protobuf.ByteString getFamily();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getFamily();
// optional bytes qualifier = 2;
/**
@@ -29,20 +29,20 @@ public final class ColumnAggregationWithErrorsProtos {
/**
* optional bytes qualifier = 2;
*/
- com.google.protobuf.ByteString getQualifier();
+ org.apache.hbase.shaded.com.google.protobuf.ByteString getQualifier();
}
/**
* Protobuf type {@code SumRequest}
*/
public static final class SumRequest extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements SumRequestOrBuilder {
// Use SumRequest.newBuilder() to construct.
- private SumRequest(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private SumRequest(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private SumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private SumRequest(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SumRequest defaultInstance;
public static SumRequest getDefaultInstance() {
@@ -53,20 +53,20 @@ public final class ColumnAggregationWithErrorsProtos {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SumRequest(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -94,47 +94,47 @@ public final class ColumnAggregationWithErrorsProtos {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumRequest_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest.Builder.class);
}
- public static com.google.protobuf.Parserrequired bytes family = 1;
*/
@@ -144,13 +144,13 @@ public final class ColumnAggregationWithErrorsProtos {
/**
* required bytes family = 1;
*/
- public com.google.protobuf.ByteString getFamily() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getFamily() {
return family_;
}
// optional bytes qualifier = 2;
public static final int QUALIFIER_FIELD_NUMBER = 2;
- private com.google.protobuf.ByteString qualifier_;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString qualifier_;
/**
* optional bytes qualifier = 2;
*/
@@ -160,13 +160,13 @@ public final class ColumnAggregationWithErrorsProtos {
/**
* optional bytes qualifier = 2;
*/
- public com.google.protobuf.ByteString getQualifier() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
private void initFields() {
- family_ = com.google.protobuf.ByteString.EMPTY;
- qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ family_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
+ qualifier_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -181,7 +181,7 @@ public final class ColumnAggregationWithErrorsProtos {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
@@ -200,11 +200,11 @@ public final class ColumnAggregationWithErrorsProtos {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(1, family_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
+ size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeBytesSize(2, qualifier_);
}
size += getUnknownFields().getSerializedSize();
@@ -267,24 +267,24 @@ public final class ColumnAggregationWithErrorsProtos {
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom(java.io.InputStream input)
@@ -293,7 +293,7 @@ public final class ColumnAggregationWithErrorsProtos {
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -303,18 +303,18 @@ public final class ColumnAggregationWithErrorsProtos {
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
@@ -328,7 +328,7 @@ public final class ColumnAggregationWithErrorsProtos {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -336,14 +336,14 @@ public final class ColumnAggregationWithErrorsProtos {
* Protobuf type {@code SumRequest}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builderrequired bytes family = 1;
*/
@@ -474,13 +474,13 @@ public final class ColumnAggregationWithErrorsProtos {
/**
* required bytes family = 1;
*/
- public com.google.protobuf.ByteString getFamily() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getFamily() {
return family_;
}
/**
* required bytes family = 1;
*/
- public Builder setFamily(com.google.protobuf.ByteString value) {
+ public Builder setFamily(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -500,7 +500,7 @@ public final class ColumnAggregationWithErrorsProtos {
}
// optional bytes qualifier = 2;
- private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ private org.apache.hbase.shaded.com.google.protobuf.ByteString qualifier_ = org.apache.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes qualifier = 2;
*/
@@ -510,13 +510,13 @@ public final class ColumnAggregationWithErrorsProtos {
/**
* optional bytes qualifier = 2;
*/
- public com.google.protobuf.ByteString getQualifier() {
+ public org.apache.hbase.shaded.com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
/**
* optional bytes qualifier = 2;
*/
- public Builder setQualifier(com.google.protobuf.ByteString value) {
+ public Builder setQualifier(org.apache.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -547,7 +547,7 @@ public final class ColumnAggregationWithErrorsProtos {
}
public interface SumResponseOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
+ extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder {
// required int64 sum = 1;
/**
@@ -563,14 +563,14 @@ public final class ColumnAggregationWithErrorsProtos {
* Protobuf type {@code SumResponse}
*/
public static final class SumResponse extends
- com.google.protobuf.GeneratedMessage
+ org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage
implements SumResponseOrBuilder {
// Use SumResponse.newBuilder() to construct.
- private SumResponse(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ private SumResponse(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private SumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private SumResponse(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SumResponse defaultInstance;
public static SumResponse getDefaultInstance() {
@@ -581,20 +581,20 @@ public final class ColumnAggregationWithErrorsProtos {
return defaultInstance;
}
- private final com.google.protobuf.UnknownFieldSet unknownFields;
+ private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SumResponse(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -617,40 +617,40 @@ public final class ColumnAggregationWithErrorsProtos {
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumResponse_descriptor;
}
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.Builder.class);
}
- public static com.google.protobuf.Parser